diff --git "a/attnserver.run_attnserver.slurm.sh.343199.err.log" "b/attnserver.run_attnserver.slurm.sh.343199.err.log" --- "a/attnserver.run_attnserver.slurm.sh.343199.err.log" +++ "b/attnserver.run_attnserver.slurm.sh.343199.err.log" @@ -4104,3 +4104,18310 @@ W0621 21:07:06.326000 695391 site-packages/torch/distributed/run.py:766] ******* warnings.warn( /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. warnings.warn( +[rank2]: Traceback (most recent call last): +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank2]: pretrain( +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank2]: iteration, num_floating_point_operations_so_far = train( +[rank2]: ^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank2]: ) = train_step( +[rank2]: ^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank2]: losses_reduced = forward_backward_func( +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank2]: output_tensor, num_tokens = forward_step( +[rank2]: ^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank2]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank2]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank2]: batch = next(global_batches) +[rank2]: ^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank2]: attention_mask = torch.ones( +[rank2]: ^^^^^^^^^^^ +[rank2]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 2 has a total capacity of 139.81 GiB of which 137.31 GiB is free. Including non-PyTorch memory, this process has 2.50 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank7]: Traceback (most recent call last): +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank7]: pretrain( +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank7]: iteration, num_floating_point_operations_so_far = train( +[rank7]: ^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank7]: ) = train_step( +[rank7]: ^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank7]: losses_reduced = forward_backward_func( +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank7]: output_tensor, num_tokens = forward_step( +[rank7]: ^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank7]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank7]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank7]: batch = next(global_batches) +[rank7]: ^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank7]: attention_mask = torch.ones( +[rank7]: ^^^^^^^^^^^ +[rank7]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 7 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.51 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank20]: Traceback (most recent call last): +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank20]: pretrain( +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank20]: iteration, num_floating_point_operations_so_far = train( +[rank20]: ^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank20]: ) = train_step( +[rank20]: ^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank20]: losses_reduced = forward_backward_func( +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: Traceback (most recent call last): +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank26]: pretrain( +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank26]: iteration, num_floating_point_operations_so_far = train( +[rank26]: ^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank26]: ) = train_step( +[rank26]: ^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank26]: losses_reduced = forward_backward_func( +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank20]: output_tensor, num_tokens = forward_step( +[rank20]: ^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank20]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank20]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: Traceback (most recent call last): +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank5]: pretrain( +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank5]: iteration, num_floating_point_operations_so_far = train( +[rank5]: ^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank5]: ) = train_step( +[rank5]: ^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank5]: losses_reduced = forward_backward_func( +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank26]: output_tensor, num_tokens = forward_step( +[rank26]: 
^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank26]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank26]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: Traceback (most recent call last): +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank11]: pretrain( +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank11]: iteration, num_floating_point_operations_so_far = train( +[rank11]: ^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank11]: ) = train_step( +[rank11]: ^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank11]: losses_reduced = forward_backward_func( +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank20]: batch = next(global_batches) +[rank20]: ^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank20]: attention_mask = torch.ones( +[rank20]: ^^^^^^^^^^^ +[rank20]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 4 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.52 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank5]: output_tensor, num_tokens = forward_step( +[rank5]: ^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank5]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank5]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank5]: batch = next(global_batches) +[rank5]: ^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank26]: batch = next(global_batches) +[rank26]: ^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank26]: attention_mask = torch.ones( +[rank26]: ^^^^^^^^^^^ +[rank26]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. 
GPU 2 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.51 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank11]: output_tensor, num_tokens = forward_step( +[rank11]: ^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank11]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank11]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank5]: attention_mask = torch.ones( +[rank5]: ^^^^^^^^^^^ +[rank5]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 5 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.51 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank30]: Traceback (most recent call last): +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank30]: pretrain( +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank30]: iteration, num_floating_point_operations_so_far = train( +[rank30]: ^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank30]: ) = train_step( +[rank30]: ^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank30]: losses_reduced = forward_backward_func( +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank11]: batch = next(global_batches) +[rank11]: ^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank11]: attention_mask = torch.ones( +[rank11]: ^^^^^^^^^^^ +[rank11]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 3 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.52 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank30]: output_tensor, num_tokens = forward_step( +[rank30]: ^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank30]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank30]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: Traceback (most recent call last): +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank15]: pretrain( +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank15]: iteration, num_floating_point_operations_so_far = train( +[rank15]: ^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank15]: ) = train_step( +[rank15]: ^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank15]: losses_reduced = forward_backward_func( +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank30]: batch = next(global_batches) +[rank30]: ^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank30]: attention_mask = torch.ones( +[rank30]: ^^^^^^^^^^^ +[rank30]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 6 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.51 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank15]: output_tensor, num_tokens = forward_step( +[rank15]: ^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank15]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank15]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank15]: batch = next(global_batches) +[rank15]: ^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank15]: attention_mask = torch.ones( +[rank15]: ^^^^^^^^^^^ +[rank19]: Traceback (most recent call last): +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank19]: pretrain( +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank19]: iteration, num_floating_point_operations_so_far = train( +[rank19]: ^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank19]: ) = train_step( +[rank19]: ^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank19]: losses_reduced = forward_backward_func( +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 7 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.52 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank19]: output_tensor, num_tokens = forward_step( +[rank19]: ^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank19]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank19]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: Traceback (most recent call last): +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank8]: pretrain( +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank8]: iteration, num_floating_point_operations_so_far = train( +[rank8]: ^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank8]: ) = train_step( +[rank8]: ^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank8]: losses_reduced = forward_backward_func( +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank19]: batch = next(global_batches) +[rank19]: ^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank19]: attention_mask = torch.ones( +[rank19]: ^^^^^^^^^^^ +[rank19]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 3 has a total capacity of 139.81 GiB of which 137.27 GiB is free. Including non-PyTorch memory, this process has 2.53 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank31]: Traceback (most recent call last): +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank31]: pretrain( +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank31]: iteration, num_floating_point_operations_so_far = train( +[rank31]: ^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank31]: ) = train_step( +[rank31]: ^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank31]: losses_reduced = forward_backward_func( +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: output_tensor, num_tokens = forward_step( +[rank8]: ^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank8]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank8]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank8]: batch = next(global_batches) +[rank8]: ^^^^^^^^^^^^^^^^^^^^ +[rank3]: Traceback (most recent call last): +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank3]: pretrain( +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank3]: iteration, num_floating_point_operations_so_far = train( +[rank3]: ^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank3]: ) = train_step( +[rank3]: ^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank3]: losses_reduced = forward_backward_func( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank31]: output_tensor, num_tokens = forward_step( +[rank31]: ^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank31]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank31]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank8]: attention_mask = torch.ones( +[rank8]: ^^^^^^^^^^^ +[rank8]: 
torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 0 has a total capacity of 139.81 GiB of which 137.27 GiB is free. Including non-PyTorch memory, this process has 2.53 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank3]: output_tensor, num_tokens = forward_step( +[rank3]: ^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank3]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank3]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank3]: batch = next(global_batches) +[rank3]: ^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank31]: batch = next(global_batches) +[rank31]: ^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank31]: attention_mask = torch.ones( +[rank31]: ^^^^^^^^^^^ +[rank31]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 7 has a total capacity of 139.81 GiB of which 137.31 GiB is free. Including non-PyTorch memory, this process has 2.50 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank3]: attention_mask = torch.ones( +[rank3]: ^^^^^^^^^^^ +[rank3]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 3 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.51 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank17]: Traceback (most recent call last): +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank17]: pretrain( +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank17]: iteration, num_floating_point_operations_so_far = train( +[rank17]: ^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank17]: ) = train_step( +[rank17]: ^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank17]: losses_reduced = forward_backward_func( +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: Traceback (most recent call last): +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank6]: pretrain( +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank6]: iteration, num_floating_point_operations_so_far = train( +[rank6]: ^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank6]: ) = train_step( +[rank6]: ^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank6]: losses_reduced = forward_backward_func( +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank17]: output_tensor, num_tokens = forward_step( +[rank17]: ^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank17]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank17]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: output_tensor, num_tokens = forward_step( +[rank6]: ^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank6]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank6]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank6]: batch = next(global_batches) +[rank6]: ^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank17]: batch = next(global_batches) +[rank17]: ^^^^^^^^^^^^^^^^^^^^ +[rank17]: 
File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank17]: attention_mask = torch.ones( +[rank17]: ^^^^^^^^^^^ +[rank17]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 1 has a total capacity of 139.81 GiB of which 137.27 GiB is free. Including non-PyTorch memory, this process has 2.53 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank6]: attention_mask = torch.ones( +[rank6]: ^^^^^^^^^^^ +[rank6]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 6 has a total capacity of 139.81 GiB of which 137.31 GiB is free. Including non-PyTorch memory, this process has 2.50 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank16]: Traceback (most recent call last): +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank16]: pretrain( +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank16]: iteration, num_floating_point_operations_so_far = train( +[rank16]: ^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank16]: ) = train_step( +[rank16]: ^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank16]: losses_reduced = forward_backward_func( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank16]: output_tensor, num_tokens = forward_step( +[rank16]: ^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank16]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank16]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank16]: batch = next(global_batches) +[rank16]: ^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank16]: attention_mask = torch.ones( +[rank16]: ^^^^^^^^^^^ +[rank16]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. 
GPU 0 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.52 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank25]: Traceback (most recent call last): +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank25]: pretrain( +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank25]: iteration, num_floating_point_operations_so_far = train( +[rank25]: ^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank25]: ) = train_step( +[rank25]: ^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank25]: losses_reduced = forward_backward_func( +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: Traceback (most recent call last): +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank1]: pretrain( +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank1]: iteration, num_floating_point_operations_so_far = train( +[rank1]: ^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank1]: ) = train_step( +[rank1]: ^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank1]: losses_reduced = forward_backward_func( +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank25]: output_tensor, num_tokens = forward_step( +[rank25]: ^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank25]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank25]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: output_tensor, num_tokens = forward_step( +[rank1]: ^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank1]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank1]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^ 
+[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank1]: batch = next(global_batches) +[rank1]: ^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank25]: batch = next(global_batches) +[rank25]: ^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank25]: attention_mask = torch.ones( +[rank25]: ^^^^^^^^^^^ +[rank25]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 1 has a total capacity of 139.81 GiB of which 137.31 GiB is free. Including non-PyTorch memory, this process has 2.50 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank12]: Traceback (most recent call last): +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank12]: pretrain( +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank12]: iteration, num_floating_point_operations_so_far = train( +[rank12]: ^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank12]: ) = train_step( +[rank12]: ^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank12]: losses_reduced = forward_backward_func( +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank1]: attention_mask = torch.ones( +[rank1]: ^^^^^^^^^^^ +[rank1]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 1 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.51 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank12]: output_tensor, num_tokens = forward_step( +[rank12]: ^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank12]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank12]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: Traceback (most recent call last): +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank4]: pretrain( +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank4]: iteration, num_floating_point_operations_so_far = train( +[rank4]: ^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank4]: ) = train_step( +[rank4]: ^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank4]: losses_reduced = forward_backward_func( +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank12]: batch = next(global_batches) +[rank12]: ^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank12]: attention_mask = torch.ones( +[rank12]: ^^^^^^^^^^^ +[rank12]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 4 has a total capacity of 139.81 GiB of which 137.27 GiB is free. Including non-PyTorch memory, this process has 2.53 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank4]: output_tensor, num_tokens = forward_step( +[rank4]: ^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank4]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank4]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank4]: batch = next(global_batches) +[rank4]: ^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank4]: attention_mask = torch.ones( +[rank4]: ^^^^^^^^^^^ +[rank4]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 4 has a total capacity of 139.81 GiB of which 137.31 GiB is free. Including non-PyTorch memory, this process has 2.50 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank28]: Traceback (most recent call last): +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank28]: pretrain( +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank28]: iteration, num_floating_point_operations_so_far = train( +[rank28]: ^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank28]: ) = train_step( +[rank28]: ^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank28]: losses_reduced = forward_backward_func( +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: Traceback (most recent call last): +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank0]: pretrain( +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank0]: iteration, num_floating_point_operations_so_far = train( +[rank0]: ^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank0]: ) = train_step( +[rank0]: ^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank0]: losses_reduced = forward_backward_func( +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank28]: output_tensor, num_tokens = forward_step( 
+[rank28]: ^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank28]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank28]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: output_tensor, num_tokens = forward_step( +[rank0]: ^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank0]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank0]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank0]: batch = next(global_batches) +[rank0]: ^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank28]: batch = next(global_batches) +[rank28]: ^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank28]: attention_mask = torch.ones( +[rank28]: ^^^^^^^^^^^ +[rank28]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 4 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.51 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank14]: Traceback (most recent call last): +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank14]: pretrain( +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank14]: iteration, num_floating_point_operations_so_far = train( +[rank14]: ^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank14]: ) = train_step( +[rank14]: ^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank14]: losses_reduced = forward_backward_func( +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank0]: attention_mask = torch.ones( +[rank0]: ^^^^^^^^^^^ +[rank0]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 0 has a total capacity of 139.81 GiB of which 137.31 GiB is free. Including non-PyTorch memory, this process has 2.50 GiB memory in use. 
Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank27]: Traceback (most recent call last): +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank27]: pretrain( +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank27]: iteration, num_floating_point_operations_so_far = train( +[rank27]: ^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank27]: ) = train_step( +[rank27]: ^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank27]: losses_reduced = forward_backward_func( +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank14]: output_tensor, num_tokens = forward_step( +[rank14]: ^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank14]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank14]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank27]: output_tensor, num_tokens = forward_step( +[rank27]: ^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank14]: batch = next(global_batches) +[rank14]: ^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank14]: attention_mask = torch.ones( +[rank14]: ^^^^^^^^^^^ +[rank14]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 6 has a total capacity of 139.81 GiB of which 137.27 GiB is free. Including non-PyTorch memory, this process has 2.53 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank27]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank27]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank27]: batch = next(global_batches) +[rank27]: ^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank27]: attention_mask = torch.ones( +[rank27]: ^^^^^^^^^^^ +[rank9]: Traceback (most recent call last): +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank9]: pretrain( +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank9]: iteration, num_floating_point_operations_so_far = train( +[rank9]: ^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank9]: ) = train_step( +[rank9]: ^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank9]: losses_reduced = forward_backward_func( +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank27]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 3 has a total capacity of 139.81 GiB of which 137.31 GiB is free. Including non-PyTorch memory, this process has 2.50 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank9]: output_tensor, num_tokens = forward_step( +[rank9]: ^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank9]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank9]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank9]: batch = next(global_batches) +[rank9]: ^^^^^^^^^^^^^^^^^^^^ +[rank23]: Traceback (most recent call last): +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank23]: pretrain( +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank23]: iteration, num_floating_point_operations_so_far = train( +[rank23]: ^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank23]: ) = train_step( +[rank23]: ^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank23]: losses_reduced = forward_backward_func( +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank9]: attention_mask = torch.ones( +[rank9]: ^^^^^^^^^^^ +[rank9]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 1 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.52 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank10]: Traceback (most recent call last): +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank10]: pretrain( +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank23]: output_tensor, num_tokens = forward_step( +[rank23]: ^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank23]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank23]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank10]: iteration, num_floating_point_operations_so_far = train( +[rank10]: ^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank10]: ) = train_step( +[rank10]: ^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank10]: losses_reduced = forward_backward_func( +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank10]: output_tensor, num_tokens = forward_step( +[rank10]: ^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank23]: batch = next(global_batches) +[rank23]: ^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank23]: attention_mask = torch.ones( +[rank23]: ^^^^^^^^^^^ +[rank23]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 7 has a total capacity of 139.81 GiB of which 137.27 GiB is free. Including non-PyTorch memory, this process has 2.53 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank18]: Traceback (most recent call last): +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank18]: pretrain( +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank18]: iteration, num_floating_point_operations_so_far = train( +[rank18]: ^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank18]: ) = train_step( +[rank18]: ^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank18]: losses_reduced = forward_backward_func( +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: Traceback (most recent call last): +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank24]: pretrain( +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank24]: iteration, num_floating_point_operations_so_far = train( +[rank24]: ^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank24]: ) = train_step( +[rank24]: ^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank24]: losses_reduced = forward_backward_func( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank10]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank10]: batch = next(global_batches) +[rank10]: ^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank10]: attention_mask = torch.ones( +[rank10]: ^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank18]: output_tensor, num_tokens = forward_step( +[rank18]: ^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank18]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank18]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank24]: output_tensor, num_tokens = forward_step( +[rank24]: 
^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank24]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank24]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 2 has a total capacity of 139.81 GiB of which 137.27 GiB is free. Including non-PyTorch memory, this process has 2.53 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank18]: batch = next(global_batches) +[rank18]: ^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank18]: attention_mask = torch.ones( +[rank18]: ^^^^^^^^^^^ +[rank18]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 2 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.52 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank24]: batch = next(global_batches) +[rank24]: ^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank24]: attention_mask = torch.ones( +[rank24]: ^^^^^^^^^^^ +[rank24]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 0 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.51 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank13]: Traceback (most recent call last): +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank13]: pretrain( +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank13]: iteration, num_floating_point_operations_so_far = train( +[rank13]: ^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank13]: ) = train_step( +[rank13]: ^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank13]: losses_reduced = forward_backward_func( +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: Traceback (most recent call last): +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank21]: pretrain( +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank21]: iteration, num_floating_point_operations_so_far = train( +[rank21]: ^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank21]: ) = train_step( +[rank21]: ^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank21]: losses_reduced = forward_backward_func( +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: Traceback (most recent call last): +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank29]: pretrain( +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank29]: iteration, num_floating_point_operations_so_far = train( +[rank29]: ^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank29]: ) = train_step( +[rank29]: ^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank29]: losses_reduced = forward_backward_func( +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank13]: output_tensor, num_tokens = forward_step( +[rank13]: ^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank13]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank13]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank21]: output_tensor, num_tokens = forward_step( +[rank21]: ^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step 
+[rank21]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank21]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank29]: output_tensor, num_tokens = forward_step( +[rank29]: ^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank29]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank29]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank13]: batch = next(global_batches) +[rank13]: ^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank13]: attention_mask = torch.ones( +[rank13]: ^^^^^^^^^^^ +[rank13]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 5 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.52 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank21]: batch = next(global_batches) +[rank21]: ^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank21]: attention_mask = torch.ones( +[rank21]: ^^^^^^^^^^^ +[rank21]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 5 has a total capacity of 139.81 GiB of which 137.27 GiB is free. Including non-PyTorch memory, this process has 2.53 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank29]: batch = next(global_batches) +[rank29]: ^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank29]: attention_mask = torch.ones( +[rank29]: ^^^^^^^^^^^ +[rank29]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. 
GPU 5 has a total capacity of 139.81 GiB of which 137.31 GiB is free. Including non-PyTorch memory, this process has 2.50 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank22]: Traceback (most recent call last): +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank22]: pretrain( +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank22]: iteration, num_floating_point_operations_so_far = train( +[rank22]: ^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank22]: ) = train_step( +[rank22]: ^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank22]: losses_reduced = forward_backward_func( +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank22]: output_tensor, num_tokens = forward_step( +[rank22]: ^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank22]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank22]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank22]: batch = next(global_batches) +[rank22]: ^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank22]: attention_mask = torch.ones( +[rank22]: ^^^^^^^^^^^ +[rank22]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 GiB. GPU 6 has a total capacity of 139.81 GiB of which 137.29 GiB is free. Including non-PyTorch memory, this process has 2.52 GiB memory in use. Of the allocated memory 980.49 MiB is allocated by PyTorch, and 43.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank11]:[W621 21:07:38.980176487 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank7]:[W621 21:07:38.251756992 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. 
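A note on the failure mode above: 576.00 GiB is exactly the cost of a dense float32 attention mask of shape [batch, 1, seq, seq] at batch size 16 and sequence length 98304, both of which appear in this profiling sweep. A minimal sketch of the arithmetic, assuming those values; the real setup_batches() in pretrain_gpt_profile.py is not shown in this log, so this only reproduces the numbers and illustrates a cheaper masking strategy:

    # Sketch only: batch/seq are assumptions inferred from the sweep in this log.
    import torch

    batch, seq = 16, 98304
    dense_gib = batch * seq * seq * 4 / 2**30  # torch.ones defaults to float32 (4 bytes)
    print(dense_gib)                           # 576.0, matching the OOM message
    bool_gib = batch * seq * seq * 1 / 2**30   # a torch.bool mask is 4x smaller
    print(bool_gib)                            # 144.0, still over one 139.81 GiB GPU

    # For plain causal attention the usual fix is to never materialize the mask:
    # fused kernels derive it implicitly from is_causal.
    q = k = v = torch.randn(1, 8, 1024, 64)
    out = torch.nn.functional.scaled_dot_product_attention(q, k, v, is_causal=True)

Note that PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True, which the message suggests, cannot help here: the single request exceeds the device's total capacity roughly fourfold, so only shrinking or eliminating the mask can.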
+[rank11]:[W621 21:07:38.980176487 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank7]:[W621 21:07:38.251756992 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank31]:[W621 21:07:39.516555321 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank5]:[W621 21:07:39.321466707 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank10]:[W621 21:07:39.201464898 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank29]:[W621 21:07:39.526902946 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank26]:[W621 21:07:39.534089548 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank14]:[W621 21:07:39.212090202 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank4]:[W621 21:07:39.351299314 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank23]:[W621 21:07:39.944528702 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank3]:[W621 21:07:39.356513280 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank13]:[W621 21:07:39.255033710 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank15]:[W621 21:07:39.284787501 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank12]:[W621 21:07:39.285956685 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank6]:[W621 21:07:39.413552009 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank27]:[W621 21:07:39.615827540 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank1]:[W621 21:07:39.419009619 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank18]:[W621 21:07:39.012323386 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank17]:[W621 21:07:39.025777225 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank20]:[W621 21:07:39.036793485 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank9]:[W621 21:07:39.335837191 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank25]:[W621 21:07:39.658797402 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank2]:[W621 21:07:39.475416588 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank22]:[W621 21:07:39.073762782 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank19]:[W621 21:07:39.084014931 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank28]:[W621 21:07:39.699818766 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank30]:[W621 21:07:39.766946260 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank21]:[W621 21:07:39.267374743 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
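The wall of destroy_process_group() warnings above is expected when ranks die mid-step, but the same warning in a clean run points at missing teardown. A minimal sketch of the explicit cleanup the warning asks for; run_training() is a hypothetical stand-in, not the Megatron entry point:

    import torch.distributed as dist

    def run_training():
        pass  # hypothetical stand-in for the real training loop

    def main():
        dist.init_process_group(backend="nccl")
        try:
            run_training()
        finally:
            # Explicit teardown silences the warning and releases NCCL resources.
            dist.destroy_process_group()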
+W0621 21:07:39.862000 695391 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 695481 closing signal SIGTERM
+W0621 21:07:39.864000 695391 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 695482 closing signal SIGTERM
+W0621 21:07:39.865000 695391 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 695483 closing signal SIGTERM
+W0621 21:07:39.865000 695391 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 695484 closing signal SIGTERM
+W0621 21:07:39.865000 695391 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 695485 closing signal SIGTERM
+W0621 21:07:39.866000 695391 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 695487 closing signal SIGTERM
+W0621 21:07:39.866000 695391 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 695488 closing signal SIGTERM
+W0621 21:07:39.954000 1959055 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1959125 closing signal SIGTERM
+W0621 21:07:39.957000 1959055 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1959126 closing signal SIGTERM
+W0621 21:07:39.958000 1959055 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1959127 closing signal SIGTERM
+W0621 21:07:39.959000 1959055 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1959128 closing signal SIGTERM
+W0621 21:07:39.959000 1959055 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1959129 closing signal SIGTERM
+W0621 21:07:39.959000 1959055 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1959130 closing signal SIGTERM
+W0621 21:07:39.959000 1959055 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1959132 closing signal SIGTERM
+W0621 21:07:39.975000 2155103 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2155173 closing signal SIGTERM
+W0621 21:07:39.977000 2155103 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2155174 closing signal SIGTERM
+W0621 21:07:39.978000 2155103 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2155176 closing signal SIGTERM
+W0621 21:07:39.979000 2155103 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2155177 closing signal SIGTERM
+W0621 21:07:39.980000 2155103 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2155178 closing signal SIGTERM
+W0621 21:07:39.980000 2155103 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2155179 closing signal SIGTERM
+W0621 21:07:39.980000 2155103 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2155180 closing signal SIGTERM
+W0621 21:07:40.038000 1678037 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1678107 closing signal SIGTERM
+W0621 21:07:40.041000 1678037 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1678108 closing signal SIGTERM
+W0621 21:07:40.042000 1678037 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1678109 closing signal SIGTERM
+W0621 21:07:40.043000 1678037 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1678110 closing signal SIGTERM
+W0621 21:07:40.044000 1678037 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1678111 closing signal SIGTERM
+W0621 21:07:40.044000 1678037 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1678112 closing signal SIGTERM
+W0621 21:07:40.044000 1678037 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1678113 closing signal SIGTERM
+E0621 21:07:40.932000 695391 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 5 (pid: 695486) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent
+    raise ChildFailedError(
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time : 2025-06-21_21:07:39
+  host : fs-mbz-gpu-600
+  rank : 5 (local_rank: 5)
+  exitcode : 1 (pid: 695486)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
+E0621 21:07:40.976000 1959055 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 6 (pid: 1959131) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+E0621 21:07:40.997000 2155103 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 2 (pid: 2155175) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent
+    raise ChildFailedError(
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time : 2025-06-21_21:07:39
+  host : fs-mbz-gpu-702
+  rank : 14 (local_rank: 6)
+  exitcode : 1 (pid: 1959131)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent
+    raise ChildFailedError(
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time : 2025-06-21_21:07:39
+  host : fs-mbz-gpu-768
+  rank : 26 (local_rank: 2)
+  exitcode : 1 (pid: 2155175)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
++ set +x
++ set +x
++ set +x
+E0621 21:07:41.362000 1678037 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 7 (pid: 1678114) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+W0621 21:07:41.375000 1678037 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-717_1678037_0' has failed to shutdown the rendezvous '343199' due to an error of type RendezvousConnectionError.
+[W621 21:07:41.274135213 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=4, addr=[fs-mbz-gpu-717]:36498, remote=[fs-mbz-gpu-600]:29500): Broken pipe
+Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x1484045785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x1483ed45aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baa358 (0x1483ed45c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5babb3e (0x1483ed45db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: c10d::TCPStore::doWait(c10::ArrayRef<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::chrono::duration<long, std::ratio<1l, 1000l> >) + 0x1a6 (0x1483ed457ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0x33 (0x1483ed457ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #6: c10d::TCPStore::get(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0xab (0x1483ed458f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #7: <unknown function> + 0xc0f526 (0x1483fc78b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #8: <unknown function> + 0x37f17d (0x1483fbefb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+<omitting python frames>
+frame #26: <unknown function> + 0x29d90 (0x1484055e0d90 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #27: __libc_start_main + 0x80 (0x1484055e0e40 in /lib/x86_64-linux-gnu/libc.so.6)
+
+W0621 21:07:41.388000 1678037 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-717_1678037_0' has failed to shutdown the rendezvous '343199' due to an error of type RendezvousConnectionError.
+[W621 21:07:41.285910130 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=4, addr=[fs-mbz-gpu-717]:36498, remote=[fs-mbz-gpu-600]:29500): Broken pipe
+Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x1484045785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x1483ed45aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baa358 (0x1483ed45c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5babb3e (0x1483ed45db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: c10d::TCPStore::doWait(c10::ArrayRef<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::chrono::duration<long, std::ratio<1l, 1000l> >) + 0x1a6 (0x1483ed457ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0x33 (0x1483ed457ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #6: c10d::TCPStore::get(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0xab (0x1483ed458f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #7: <unknown function> + 0xc0f526 (0x1483fc78b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #8: <unknown function> + 0x37f17d (0x1483fbefb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+<omitting python frames>
+frame #26: <unknown function> + 0x29d90 (0x1484055e0d90 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #27: __libc_start_main + 0x80 (0x1484055e0e40 in /lib/x86_64-linux-gnu/libc.so.6)
+
+W0621 21:07:41.398000 1678037 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-717_1678037_0' has failed to shutdown the rendezvous '343199' due to an error of type RendezvousConnectionError.
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent
+    raise ChildFailedError(
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time : 2025-06-21_21:07:40
+  host : fs-mbz-gpu-717
+  rank : 23 (local_rank: 7)
+  exitcode : 1 (pid: 1678114)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
++ set +x
++ for ctx_length in 1024 2048 4096 8192 12288 16384 24576 32768 40960 49152 65536 81920 98304 131072
++ export PROF_CTX_LENGTH=16384
++ PROF_CTX_LENGTH=16384
++ name='/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L16384*tp8.cp4.bs16.json'
++ '[' -f '/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L16384*tp8.cp4.bs16.json' ']'
++ echo 'Running ctx_length=16384, TP_SIZE=8, CP_SIZE=4, BATCH_SIZE=16'
++ srun bash ./attnserver.sh
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 1 --rdzv_id 343199 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-600:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 16384 --max-position-embeddings 16384 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 3 --rdzv_id 343199 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-600:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 16384 --max-position-embeddings 16384 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 2 --rdzv_id 343199 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-600:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 16384 --max-position-embeddings 16384 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 0 --rdzv_id 343199 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-600:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 16384 --max-position-embeddings 16384 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:07:44.285000 1679871 site-packages/torch/distributed/run.py:766]
+W0621 21:07:44.285000 1679871 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:07:44.285000 1679871 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:07:44.285000 1679871 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:07:44.304000 2156937 site-packages/torch/distributed/run.py:766]
+W0621 21:07:44.304000 2156937 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:07:44.304000 2156937 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:07:44.304000 2156937 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:07:44.377000 1960889 site-packages/torch/distributed/run.py:766]
+W0621 21:07:44.377000 1960889 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:07:44.377000 1960889 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:07:44.377000 1960889 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:07:44.470000 697279 site-packages/torch/distributed/run.py:766]
+W0621 21:07:44.470000 697279 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:07:44.470000 697279 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:07:44.470000 697279 site-packages/torch/distributed/run.py:766] *****************************************
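The FutureWarning repeated above concerns the launcher itself: `python3 -m torch.distributed.launch` should become `torchrun` with the same rendezvous flags. The script-side change the warning names is reading the local rank from the environment rather than from a `--local-rank` argument, since torchrun sets LOCAL_RANK for each worker. A minimal sketch of that change:

    # Sketch of the migration the FutureWarning asks for; torchrun sets LOCAL_RANK.
    import os
    import torch

    local_rank = int(os.environ.get("LOCAL_RANK", "0"))
    if torch.cuda.is_available():
        torch.cuda.set_device(local_rank)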
+[rank0]:[W621 21:08:08.764894116 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 0] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[ranks 1-31 emit the same warning for their respective devices (rank N on GPU N mod 8, across 4 nodes of 8 GPUs); duplicates elided]
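[note: the ProcessGroupNCCL warning above suggests making the rank-to-GPU mapping explicit; a minimal sketch, assuming a recent PyTorch where init_process_group() accepts device_id:

    import os
    import torch
    import torch.distributed as dist

    local_rank = int(os.environ["LOCAL_RANK"])
    torch.cuda.set_device(local_rank)
    # binding the process group to a concrete device removes the "device is
    # currently unknown" guess and the potential hang the warning describes
    dist.init_process_group(backend="nccl", device_id=torch.device(f"cuda:{local_rank}"))
]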
+/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly.
+  warnings.warn(
+[this UserWarning is repeated many times, once per process; duplicates elided]
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect.
+  warnings.warn(
+[this DeprecationWarning is repeated many times, once per process; duplicates elided]
+/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions.
Please use `load` instead.
+  checkpoint.load_state_dict(
+[this FutureWarning is repeated many times across processes; duplicates elided]
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor.
+  device = getattr(value, "device", None)
+[this FutureWarning is repeated many times, interleaved with further copies of the `load_state_dict` warning; duplicates elided]
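[note: both FutureWarnings point at the newer torch.distributed.checkpoint surface (load() and DTensor); a minimal sketch of the suggested replacement call, with the checkpoint path and tensor shape hypothetical:

    import torch
    import torch.distributed.checkpoint as dcp

    # dcp.load() fills a pre-allocated state dict in place from the checkpoint,
    # replacing the deprecated checkpoint.load_state_dict() entry point
    state_dict = {"embedding.position_embeddings.weight": torch.empty(16384, 4096)}
    dcp.load(state_dict=state_dict, checkpoint_id="/path/to/checkpoint_dir")
]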
+  device = getattr(value, "device", None)
+[rank6]: Traceback (most recent call last):
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank6]:     pretrain(
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
+[rank6]:     model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
+[rank6]:     args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
+[rank6]:     state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
+[rank6]:     return _load_global_dist_base_checkpoint(
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
+[rank6]:     state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
+[rank6]:     loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load
+[rank6]:     checkpoint.load_state_dict(
+[rank6]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+[rank6]:     return arg(*args, **kwargs)
+[rank6]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict
+[rank6]:     return _load_state_dict(
+[rank6]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict
+[rank6]:     central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step)
+[rank6]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter
+[rank6]:     raise result
+[rank6]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])
+[rank6]: Traceback (most recent call last): (RANK 0)
+[rank6]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank6]:     local_data = map_fun()
+[rank6]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank6]:     result = func(*args, **kwargs)
+[rank6]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank6]:     local_plan = planner.create_local_plan()
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank6]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank6]:     raise CheckpointingException(_msg)
+[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight
+[rank6 goes on to print the identical inner traceback and shape-mismatch exception for RANK 1, RANK 2, ...; ranks 15, 20 and 29 print the same CheckpointException interleaved with it, and the exception itself lists all 32 ranks. Duplicates elided]
local_plan = planner.create_local_plan() +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 1) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 1) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call 
last): (RANK 2) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 2) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 2) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = 
func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: Traceback (most recent call last): (RANK 3) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: raise CheckpointingException(_msg) +[rank29]: raise CheckpointingException(_msg) +[rank15]: raise CheckpointingException(_msg) +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 4) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() 
+[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 3) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 3) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 3) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 4) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 4) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 4) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 5) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 5) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 5) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 5) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise 
CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 6) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 6) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 6) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 6) +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 7) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 7) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 7) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 7) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 8) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank20]: Traceback (most recent call last): (RANK 8) +[rank29]: Traceback (most recent call last): (RANK 8) +[rank15]: Traceback (most recent call last): (RANK 8) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: Traceback (most recent call last): (RANK 9) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 9) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 9) +[rank29]: 
File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 9) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 10) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 10) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 10) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 10) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: Traceback (most recent call last): (RANK 11) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 11) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 11) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 11) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 12) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", 
line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 12) +[rank20]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 12) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 12) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 13) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 13) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 13) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 13) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: ^^^^^^^^^ 
+[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 14) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 14) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 14) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 14) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 15) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 15) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 15) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", 
line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 15) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 16) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank20]: raise CheckpointingException(_msg) +[rank29]: raise CheckpointingException(_msg) +[rank15]: raise CheckpointingException(_msg) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 16) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) 
and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 16) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 16) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank6]: Traceback (most recent call last): (RANK 17) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 17) 
+[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 17) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 17) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 18) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", 
line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 18) +[rank20]: 
File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 18) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 18) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 19) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 19) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 19) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: Traceback (most recent call last): (RANK 19) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 20) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank20]: Traceback (most recent call last): (RANK 20) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 20) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 20) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 21) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank20]: Traceback (most recent 
call last): (RANK 21) +[rank29]: Traceback (most recent call last): (RANK 21) +[rank15]: Traceback (most recent call last): (RANK 21) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 22) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 22) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 22) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 22) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 23) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, 
in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 23) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 23) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 23) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: 
File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 24) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 24) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 24) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 24) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank6]: Traceback (most recent call last): (RANK 25) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 25) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 25) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 25) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 26) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 26) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", 
line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 26) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 26) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File 
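Every repeated frame above funnels into Megatron's _validate_global_shapes, which compares the global shape recorded in the checkpoint metadata against the shape the restarted model registers for each sharded-tensor key. A minimal sketch, assuming a simplified dict-based stand-in (the validate_global_shapes function below is illustrative, not Megatron's actual implementation), reproduces the exact message:

```python
# Minimal sketch, assuming a simplified stand-in for Megatron's
# _validate_global_shapes (dist_checkpointing/strategies/torch.py).
# The dict-based interface here is illustrative, not the real one.
import torch

class CheckpointingException(Exception):
    pass

def validate_global_shapes(loaded_shapes, expected_shapes):
    # Compare the global shape stored in checkpoint metadata against
    # the shape the restarted model expects for each sharded-tensor key.
    for key, expected in expected_shapes.items():
        loaded = loaded_shapes.get(key)
        if loaded is None or tuple(loaded) != tuple(expected):
            raise CheckpointingException(
                f"Global shape mismatch for loaded ({loaded}) and "
                f"expected ({expected}) tensor for key {key}"
            )

try:
    validate_global_shapes(
        {"embedding.position_embeddings.weight": torch.Size([12288, 4096])},  # in checkpoint
        {"embedding.position_embeddings.weight": (16384, 4096)},              # in model
    )
except CheckpointingException as err:
    print(err)  # same mismatch message as the log above
```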
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank6]: Traceback (most recent call last): (RANK 27) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 27) 
+[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 27) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 27) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 28) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 28) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 28) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 28) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 
605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 29) +[rank6]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank20]: raise CheckpointingException(_msg) +[rank29]: raise CheckpointingException(_msg) +[rank15]: raise CheckpointingException(_msg) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 29) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 29) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 29) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 30) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 30) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 30) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, 
in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 30) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: ^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 31) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 31) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 31) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 31) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +[rank4]: Traceback (most recent call last): +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank4]: pretrain( +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +[rank22]: Traceback (most recent call last): +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank22]: pretrain( +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) 
+[rank4]: Traceback (most recent call last):
+[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank4]: pretrain(
+[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
+[rank4]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
+[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
+[rank4]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
+[rank4]: ^^^^^^^^^^^^^^^^
+[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
+[rank4]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
+[rank4]: ^^^^^^^^^^^^^^^^^^^^^^
+[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
+[rank4]: return _load_global_dist_base_checkpoint(
+[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
+[rank4]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
+[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
+[rank4]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
+[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load
+[rank4]: checkpoint.load_state_dict(
+[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+[rank4]: return arg(*args, **kwargs)
+[rank4]: ^^^^^^^^^^^^^^^^^^^^
+[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict
+[rank4]: return _load_state_dict(
+[rank4]: ^^^^^^^^^^^^^^^^^
+[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict
+[rank4]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step)
+[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter
+[rank4]: raise result
+[rank4]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])
+[rank4]: Traceback (most recent call last): (RANK 0)
+[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank4]: local_data = map_fun()
+[rank4]: ^^^^^^^^^
+[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank4]: result = func(*args, **kwargs)
+[rank4]: ^^^^^^^^^^^^^^^^^^^^^
+[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank4]: local_plan = planner.create_local_plan()
+[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank4]: raise CheckpointingException(_msg)
+[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight
+[rank4]: Traceback (most recent call last): (RANK 1) through (RANK 8): [frames and exception identical to (RANK 0); the per-rank dump is truncated here]
+
+[rank22]: Traceback (most recent call last): [identical to rank4's traceback, including the CheckpointException for ranks 0-31 and the nested (RANK 0) through (RANK 8) dumps]
+[rank28]: Traceback (most recent call last): [identical to rank4's traceback]
+[rank13]: Traceback (most recent call last): [identical to rank4's traceback]
+[rank25]: Traceback (most recent call last): [identical to rank4's traceback through the CheckpointException for ranks 0-31; truncated in the (RANK 0) dump]
+
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 8) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank25]: raise result +[rank25]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank25]: Traceback (most recent call last): (RANK 0) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 8) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank2]: pretrain( +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line[rank19]: Traceback (most recent call last): +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank19]: pretrain( +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank19]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank19]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank19]: ^^^^^^^^^^^^^^^^ +[rank25]: re[rank30]: Traceback (most recent call last): +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank30]: pretrain( +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank30]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank30]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank30]: ^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line[rank8]: Traceback (most recent call last): +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank8]: pretrain( +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank8]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank8]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank8]: ^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank2]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank2]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank2]: ^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank2]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank19]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank19]: return _load_global_dist_base_checkpoint( +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank19]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: state_dict, checkpoint_name, 
release, ckpt_type = _load_base_checkpoint( +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank30]: return _load_global_dist_base_checkpoint( +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank30]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank8]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank8]: return _load_global_dist_base_checkpoint( +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank8]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank2]: return _load_global_dist_base_checkpoint( +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank2]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank2]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank19]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank19]: checkpoint.load_state_dict( +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank19]: return arg(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 
41, in load_state_dict +[rank19]: return _load_state_dict( +[rank19]: ^^^^^^^^^^^^^^^^^ +[rank30]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank30]: checkpoint.load_state_dict( +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank30]: return arg(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank30]: return _load_state_dict( +[rank30]: ^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank8]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank8]: checkpoint.load_state_dict( +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank8]: return arg(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank8]: return _load_state_dict( +[rank8]: ^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank2]: checkpoint.load_state_dict( +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank2]: return arg(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank2]: return _load_state_dict( +[rank2]: ^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank2]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank19]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank19]: raise result +[rank19]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) 
+[rank19]: Traceback (most recent call last): (RANK 0) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank30]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank8]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank8]: raise result +[rank8]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank8]: Traceback (most recent call last): (RANK 0) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank2]: raise result +[rank2]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank2]: Traceback (most recent call last): (RANK 0) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: re[rank16]: Traceback (most recent call last): +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank16]: pretrain( +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank16]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank30]: raise result +[rank30]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank30]: Traceback (most recent call last): (RANK 0) +[rank30]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: result = func(*args, **kwargs) +[rank2]: [rank3]: Traceback (most recent call last): +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank3]: pretrain( +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank16]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank16]: ^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank16]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank16]: return _load_global_dist_base_checkpoint( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: re[rank31]: Traceback (most recent call last): +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank31]: pretrain( +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank31]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank31]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank31]: ^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 9) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank3]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank3]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank3]: ^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank3]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank16]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank16]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank16]: checkpoint.load_state_dict( +[rank31]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank31]: return _load_global_dist_base_checkpoint( +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank31]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank3]: return _load_global_dist_base_checkpoint( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank3]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank3]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank16]: return arg(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank16]: return _load_state_dict( +[rank16]: ^^^^^^^^^^^^^^^^^ +[rank31]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank31]: checkpoint.load_state_dict( +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank31]: return arg(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank31]: return _load_state_dict( +[rank31]: ^^^^^^^^^^^^^^^^^ +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 10) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank16]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank16]: raise result +[rank16]: torch.distributed.checkpoint.api.CheckpointException: 
CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank16]: Traceback (most recent call last): (RANK 0) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank31]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank3]: checkpoint.load_state_dict( +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank3]: return arg(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank3]: return _load_state_dict( +[rank3]: ^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank3]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: re[rank21]: Traceback (most recent call last): +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank21]: pretrain( +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank21]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank31]: raise result +[rank31]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 
10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank31]: Traceback (most recent call last): (RANK 0) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16[rank9]: Traceback (most recent call last): +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank9]: pretrain( +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank9]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank3]: raise result +[rank3]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank3]: Traceback (most recent call last): (RANK 0) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank21]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank21]: ^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank21]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank21]: return _load_global_dist_base_checkpoint( +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: re, line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 6) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank9]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank9]: ^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank9]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank9]: return _load_global_dist_base_checkpoint( +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: result = func(*args, **kwargs) +[rank3]: a/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank21]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank21]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank21]: checkpoint.load_state_dict( +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank9]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank9]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank9]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank9]: checkpoint.load_state_dict( +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 9) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank21]: return arg(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank21]: return _load_state_dict( +[rank21]: ^^^^^^^^^^^^^^^^^ +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 7) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank9]: return arg(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^ +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank21]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank21]: raise result +[rank21]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank21]: Traceback (most recent call last): (RANK 0) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank9]: return _load_state_dict( +[rank9]: ^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank9]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank9]: raise result +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 10) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: re[rank17]: Traceback (most recent call last): +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank17]: pretrain( +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank17]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 8) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank9]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank9]: Traceback (most recent call last): (RANK 0) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: [rank10]: Traceback (most recent call last): +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank10]: pretrain( +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 11) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/li[rank1]: Traceback (most recent call last): +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank17]: args.iteration, 
args.num_floating_point_operations_so_far = load_checkpoint(
[... ranks 0-31 all abort with the same exception while loading the checkpoint; the interleaved per-rank tracebacks are consolidated into one representative copy from rank 0 ...]
+[rank0]: Traceback (most recent call last):
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank0]:     pretrain(
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
+[rank0]:     model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
+[rank0]:                                             ^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
+[rank0]:     args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
+[rank0]:                                                                 ^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
+[rank0]:     state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
+[rank0]:                                                       ^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
+[rank0]:     return _load_global_dist_base_checkpoint(
+[rank0]:            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
+[rank0]:     state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
+[rank0]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
+[rank0]:     loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
+[rank0]:                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load
+[rank0]:     checkpoint.load_state_dict(
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+[rank0]:     return arg(*args, **kwargs)
+[rank0]:            ^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict
+[rank0]:     return _load_state_dict(
+[rank0]:            ^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict
+[rank0]:     central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step)
+[rank0]:                              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter
+[rank0]:     raise result
+[rank0]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])
+[rank0]: Traceback (most recent call last): (RANK 0)
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank0]:     local_data = map_fun()
+[rank0]:                  ^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank0]:     result = func(*args, **kwargs)
+[rank0]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank0]:     local_plan = planner.create_local_plan()
+[rank0]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank0]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank0]:     raise CheckpointingException(_msg)
+[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight
[... the (RANK 1) through (RANK 31) sections of the exception body repeat the same shape-mismatch traceback, and ranks 1, 2, 5, 7, 9, 10, 12, 13, 16, 17, 19, 21, 23, 25, 26, 27, 30, and 31 each print the identical aggregated exception ...]
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank26]: return _load_state_dict( +[rank26]: ^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank26]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank26]: raise result +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedd ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 2) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = 
map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedd 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank26]: Traceback (most recent call last): (RANK 0) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: re[rank24]: Traceback (most recent call last): +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank24]: pretrain( +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 1) +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 9) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank24]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank24]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank24]: ^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank24]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = 
planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank24]: return _load_global_dist_base_checkpoint( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank24]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank24]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 1) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank24]: checkpoint.load_state_dict( +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank24]: return arg(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 2) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: Traceback (most recent call last): (RANK 10) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank24]: return _load_state_dict( +[rank24]: ^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank24]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank24]: raise result +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 2) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16sult = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank24]: Traceback (most recent call last): (RANK 0) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: Traceback (most recent call last): 
(RANK[rank11]: Traceback (most recent call last): +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank11]: pretrain( +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank11]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank11]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank11]: ^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 1) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 1) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, 
in reduce_scatter +[rank11]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank11]: return _load_global_dist_base_checkpoint( +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank11]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank3]: Traceback (most recent call last): (RANKb/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank11]: checkpoint.load_state_dict( +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank11]: return arg(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank11]: return _load_state_dict( +[rank11]: ^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 12) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: Traceback (most recent call last): (RANK 2) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 2) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank11]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank11]: raise result +[rank11]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank11]: Traceback (most recent call last): (RANK 0) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 13) +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 3) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 
192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 3) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 1) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 4) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank31]: raise CheckpointingException(_msg) +[rank31]: 
+[rank1]: Traceback (most recent call last): (RANK 4) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank18]: raise result +[rank18]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank18]: Traceback (most recent call last): (RANK 0) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: re384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 3) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in 
local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: Traceback (most recent call last): (RANK 11) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 1) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most 
recent call last): (RANK 5) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 12) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 4) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: local_plan = planner.create_local_plan() +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_ ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 5) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: Traceback (most recent call last): (RANK 2) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 1) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: Traceback (most recent call last): (RANK 13) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dising.position_embeddings.weight +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embeddt_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 2) +[rank23]: Traceback (most recent call last): (RANK 3) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 6) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 14) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 4) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: Traceback (most recent call last): (RANK 7) +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 15) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", 
line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 1) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: Traceback (most recent call last): (RANK 5) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"ing.position_embeddings.weight +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 8) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", linesult = func(*args, **kwargs) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 16) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/ 3) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 2) +[rank21]: Traceback (most recent call last): (RANK 3) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 4) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: Traceback (most recent call last): (RANK 1) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 4) +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 2) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank9]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 5) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 1) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: Traceback (most recent call last): (RANK 5) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 3) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_ing.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 3) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 2) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"t_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 14) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 4) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank24]: ^^^^^^^^^ +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 4) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 5) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 3) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: Traceback (most recent call last): (RANK 15) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 5) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 4) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 16) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/, line 605, in create_local_plan +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"ing.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 3) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 6) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 4) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank13]: Traceback (most recent call last): (RANK 17) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 5) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 18) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_ 3) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: Traceback (most recent call last): (RANK 7) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 5) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torctensors) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 4) +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 8) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line, line 605, in create_local_plan +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py" 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ 
+[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 6) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 6) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: raise CheckpointingException(_msg) +[rank31]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 9) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 5) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 7) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 10) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_ 3) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: Traceback (most recent call last): (RANK 7) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 8) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/minicondsult = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: 
File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 4) +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 8) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", linesult = func(*args, **kwargs) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16, line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 6) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank14]: Traceback (most recent call last): (RANK 1) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 5) +[rank7]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: Traceback (most recent call last): (RANK 1) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 7) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 2) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 8) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 2) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_ 3) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, 
**kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line, line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 6) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 4) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedddistributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: Traceback (most recent call last): (RANK 3) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File 
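For readability, here is a minimal self-contained sketch of the kind of check the traceback shows failing. This is an illustration only, not the actual Megatron _validate_global_shapes source: the load planner compares the global shape recorded in the checkpoint metadata against the shape the current model declares for the same key, and raises when they differ.

import torch

class CheckpointingException(Exception):
    # Stand-in for megatron.core.dist_checkpointing.core.CheckpointingException.
    pass

def validate_global_shape(loaded: torch.Size, expected: tuple, key: str) -> None:
    # Compare the global shape stored in checkpoint metadata ("loaded")
    # against the shape the current model expects for the same key.
    if tuple(loaded) != tuple(expected):
        raise CheckpointingException(
            f"Global shape mismatch for loaded ({loaded}) and expected "
            f"({expected}) tensor for key {key}"
        )

# Reproduces the failure above: the checkpoint holds a 12288-position
# embedding table (hidden size 4096), while the run expects 16384 positions.
validate_global_shape(
    torch.Size([12288, 4096]),
    (16384, 4096),
    "embedding.position_embeddings.weight",
)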
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 17) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: raise CheckpointingException(_msg) +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 5) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 7) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 4) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 18) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 8) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step 
+[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 6) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 11) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: Traceback (most recent call last): (RANK 5) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py" 3) +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 
4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 12) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 9) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank31]: local_data = map_fun() +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 7) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ 
+[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 4) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 8) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 10) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = 
map_fun() +[rank16]: ^^^^^^^^^ +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 13) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 5) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dising.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 3) +[rank26]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/minicondn/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 14) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16ing.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 3) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 4) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_, line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 6) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 4) +[rank18]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank26]: local_data = map_fun() +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 15) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^ +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 16) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 5) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 7) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step 
+[rank10]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^ +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 5) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py" 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 8) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpointtensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 6) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"ing.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 3) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 9) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line, line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 6) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 4) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 7) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: ^^^^^^^^^ +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): 
+[rank0]: Traceback (most recent call last):
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank0]:     local_data = map_fun()
+[rank0]:                  ^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank0]:     result = func(*args, **kwargs)
+[rank0]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank0]:     local_plan = planner.create_local_plan()
+[rank0]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank0]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank0]:     raise CheckpointingException(_msg)
+[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight
+The identical traceback and CheckpointingException were also emitted, interleaved, by ranks 1, 2, 3, 7, 8, 9, 11, 13, 14, 18, 19, 21, 22, 25, 27, and 31; each rank's output repeats the same traceback under successive "(RANK k)" markers from the aggregated error report.
**kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank19]: Traceback (most recent call last): (RANK 8) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 11) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch 
for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 7) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 10) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank31]: Traceback (most recent call last): (RANK 18) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 8) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 12) +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torct_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/minicond384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 11) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 11) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/li/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: Traceback (most recent call last): (RANK 14) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 17) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 13) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 15) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: Traceback (most recent call last): (RANK 12) +[rank11]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dissult = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 13) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 18) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 1) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 16) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/ 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dis, line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 9) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 6) +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 19) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in a/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 9) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank17]: Traceback (most recent call last): (RANK 2) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ 
+[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 7) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedd 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: Traceback (most recent call last): (RANK 10) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 10) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank19]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 9) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16h.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 19) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 8) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", lineda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in 
create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 11) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lia/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 20) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global 
shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight
+[rank1]: Traceback (most recent call last):
+[rank1]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank1]:     local_data = map_fun()
+[rank1]:                  ^^^^^^^^^
+[rank1]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank1]:     result = func(*args, **kwargs)
+[rank1]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank1]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank1]:     local_plan = planner.create_local_plan()
+[rank1]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank1]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank1]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank1]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank1]:     raise CheckpointingException(_msg)
+[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight
[... identical CheckpointingException tracebacks emitted by every rank (ranks 0-31); duplicates omitted ...]
line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatrodistributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: local_data = map_fun() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatroreduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 18) +[rank16]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 17) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in 
local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc, line 605, in create_local_plan +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 10) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 20) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank17]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 6) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16rver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 18) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 21) +[rank4]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 25) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, 
**kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: Traceback (most recent call last): (RANK 7) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 9) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 
576, in _validate_global_shb/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 8) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 26) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 12) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 11) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException, line 605, in create_local_plan +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 10) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 6) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16n/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 13) +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 12) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: raise CheckpointingException(_msg) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and 
expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 14) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatrob/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 13) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank30]: Traceback (most recent call last): (RANK 7) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 15) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 12) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dis 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise 
CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 8) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", lineh.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 19) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 16) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 13) +[rank21]: Traceback (most recent call last): (RANK 9) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 10) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 20) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatroa/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank10]: Traceback (most recent call last): (RANK 9) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16h.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 19) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = 
func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 21) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 10) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 9) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/jun384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 11) +[rank28]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 10) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 20) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape 
mismatch for loaded (torch.Size([12288, 4096])) and expected ((16b/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 12) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 21) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 12) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ 
+[rank5]: Traceback (most recent call last): (RANK 11) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lia/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/jun 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 13) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) 
and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 13) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 9) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 9) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dis 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 10) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 10) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 9) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: ^^^^^^^^^ +[rank0]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 10) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatro: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 27) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 11) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 28) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: Traceback (most recent call last): (RANK 11) +[rank0]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lin/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 14) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 12) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16da/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 22) +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 29) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: Traceback (most recent call last): (RANK 15) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 13) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 16) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = 
+[... identical traceback and CheckpointingException repeated on ranks 0-5, 8-11, 13, 16-17, 21, 27-28, and 30 for sub-ranks (RANK 9) through (RANK 29); verbatim duplicates elided ...]
+[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 
4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 12) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 11) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: Traceback (most recent call last): (RANK 23) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: Traceback (most recent call last): (RANK 12) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 12) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 13) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 24) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 13) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = plannen/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 14) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 13) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dis384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 11) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 15) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank21]: Traceback (most recent call last): (RANK 14) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/disn/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 14) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 15) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 12) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 16) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 15) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpointn/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 14) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 16) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/t_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 13) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 
605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 16) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/disdistributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 15) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: Traceback (most recent call last): (RANK 14) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 15) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 17) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 17) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 16) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) 
+[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 18) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpointb/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 16) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/ 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank28]: Traceback (most recent call last): (RANK 18) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 12) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torct_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: raise CheckpointingException(_msg) +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 9) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: Traceback (most recent call last): (RANK 14) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 19) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 11) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: 
^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 13) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 10) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 15) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: Traceback (most recent call last): (RANK 12) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatrob/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16t_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 13) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 12) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 16) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/t_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dis3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: 
local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 14) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 14) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 30) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 15) +[rank17]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 31) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 13) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = 
planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 15) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatro/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 
4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 16) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 27) +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank13]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 17) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 16) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/h.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 19) +[rank28]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 18) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global 
shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 20) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 28) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) 
+[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 20) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 21) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 19) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in /state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: Traceback (most recent call last): (RANK 29) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 21) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 17) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/jundistributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 22) +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 18) +[rank3]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 17) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 
4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 17) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 23) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 18) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 19) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in r.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torcdistributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 18) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 24) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 25) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 11) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnseing.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 3) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 26) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 17) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: 
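The mismatch can be confirmed offline before relaunching. Megatron's torch dist-checkpointing strategy sits on top of torch.distributed.checkpoint, so the saved global shapes should be readable directly from the checkpoint metadata; a sketch, assuming a DCP-format checkpoint directory (the path below is a placeholder, not taken from this log):

    # Hypothetical offline inspection of the shapes stored in the checkpoint.
    from torch.distributed.checkpoint import FileSystemReader

    reader = FileSystemReader("/path/to/checkpoint/dir")  # placeholder path
    metadata = reader.read_metadata()
    for key, md in metadata.state_dict_metadata.items():
        # Tensor entries carry a .size (torch.Size); byte entries do not.
        if "position_embeddings" in key and hasattr(md, "size"):
            print(key, tuple(md.size))  # expect (12288, 4096) for this checkpoint

If the stored shape is indeed (12288, 4096), resuming requires either restoring the original max-position-embeddings / sequence-length settings, or converting the checkpoint (e.g. re-saving it with a resized embedding table) before scaling up the context length.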
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank19]: Traceback (most recent call last): (RANK 19) +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 24) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnsedistributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 16) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/t_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: local_plan = planner.create_local_plan() +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 14) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 20) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 17) +[rank25]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 15) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank5]: Traceback (most recent call last): (RANK 16) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpointn/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors)
+[rank0]: Traceback (most recent call last): (RANK 14)
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank0]:     local_data = map_fun()
+[rank0]:                  ^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank0]:     result = func(*args, **kwargs)
+[rank0]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank0]:     local_plan = planner.create_local_plan()
+[rank0]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank0]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank0]:     raise CheckpointingException(_msg)
+[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight
+[ranks 2-4, 7, 8, 10-12, 14, 17-19, 21, 22, 24-26, 28, 30]: identical CheckpointingException and traceback, reported as RANK 6 through RANK 31; the interleaved duplicate copies are elided here.
"/mnt/weka/home/hao.zhang/jun384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 11) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shreduce_scatter +[rank1]: local_data = map_fun() +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 28) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 12) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 12) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 20) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 13) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 29) +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/disrver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: Traceback (most recent call last): (RANK 13) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 21) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/disda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_sh/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 25) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 22) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: Traceback (most recent call last): (RANK 9) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", 
line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 17) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 10) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: local_data = map_fun() 
+[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 26) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: Traceback (most recent call last): (RANK 23) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingExceptionda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 24) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnseh.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 18) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank25]: Traceback (most recent call last): (RANK 19) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 19) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in /state_dict_loader.py", line 223, in local_step +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 22) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: Traceback (most recent call last): (RANK 17) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 17) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 23) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 20) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 18) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ 
+[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank10]: result = func(*args, **kwargs) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 24) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnse: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 27) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 21) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torcr.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 18) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/jun: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 25) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 19) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in apes +[rank2]: raise CheckpointingException(_msg) +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 28) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: Traceback (most recent call last): (RANK 27) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: 
+[ranks 1-4, 7-12, 16, 19, 21, 27, 28, 30, 31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight (same traceback as rank0, RANK labels 11 through 31)
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/jun^^^^^^ +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank21]: Traceback (most recent call last): (RANK 28) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) 
+[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 25) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 29) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 30) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 28) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 26) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2t_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 
576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 29) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank18]: Traceback (most recent call last): (RANK 14) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 31) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 27) +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise 
CheckpointingException(_msg) +[rank9]: megatron.core.distapes +[rank8]: raise CheckpointingException(_msg) +[rank1]: Traceback (most recent call last): (RANK 27) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 15) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 22) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 28) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 23) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 16) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 28) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 29) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank8]: raise CheckpointingException(_msg) +[rank1]: Traceback (most recent call last): (RANK 29) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 30) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in 
wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 27) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 24) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 30) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 31) +[rank21]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank8]: local_plan = planneh.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 19) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 
605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 20) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 31) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 28) +[rank30]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 17) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most 
recent call last): (RANK 29)
+[rank30]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank30]:     local_data = map_fun()
+[rank30]:                  ^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank30]:     result = func(*args, **kwargs)
+[rank30]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank30]:     local_plan = planner.create_local_plan()
+[rank30]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank30]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank30]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank30]:     raise CheckpointingException(_msg)
+[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight
+[identical CheckpointingException tracebacks, byte-for-byte the same apart from the rank prefix, were also raised on ranks 0-3, 5, 7-10, 12, 14, 17, 18, 23, 25, 27, and 30]
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/jun384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 11) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank14]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 26) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 24) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnsedistributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: Traceback (most recent call last): (RANK 12) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: Traceback (most recent call last): (RANK 30) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, 
in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 17) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 27) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^apes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 13) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 31) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dis: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 27) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) 
+[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 18) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: Traceback (most recent call last): (RANK 22) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +h.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 19) +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: local_plan = planner.create_local_plan() +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 23) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 28) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torcrver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 29) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 20) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 25) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 24) +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1da/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter 
+[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planneapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 22) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 21) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 26) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: Traceback (most recent call last): (RANK 22) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 23) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 22) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 23) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: ^^^^^^^^^ +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and 
expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 24) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnset_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 28) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 24) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 14) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ 
+[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 23) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planne^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, 
+[rank0]: Traceback (most recent call last): (RANK 25)
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank0]:     local_data = map_fun()
+[rank0]:                  ^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank0]:     result = func(*args, **kwargs)
+[rank0]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank0]:     local_plan = planner.create_local_plan()
+[rank0]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank0]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank0]:     raise CheckpointingException(_msg)
+[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight
+[ranks 0, 2, 3, 5, 7, 8, 10, 12, 14, 17, 18, 23, 24, 26]: the identical traceback and CheckpointingException repeat here, interleaved, once per peer annotation from (RANK 14) through (RANK 31).
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 19) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: raise CheckpointingException(_msg) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 21) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes 
+[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 27) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 27) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: Traceback (most recent call last): (RANK 20) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: 
result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 30) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 22) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 21) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: Traceback (most recent call last): (RANK 28) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 31) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank23]: Traceback (most recent call last): (RANK 23) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 29) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 24) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: Traceback (most recent call last): (RANK 22) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1da/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 22) +_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 30) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 23) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: 
File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 31) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank23]: Traceback (most recent call last): (RANK 25) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 23) +[rank12]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 26) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 24) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step 
+[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 30) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 24) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 31) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 25) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnse0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: Traceback (most recent call last): (RANK 27) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank10]: Traceback (most recent call last): (RANK 30) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step 
+[rank5]: local_plan = planner.create_local_plan() +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 28) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 26) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 31) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global 
shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 28) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 27) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank5]: ^^^^^^^^^ +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 29) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^ +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 30) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", 
line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 29) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 30) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 28) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 29) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 31) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 28) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 31) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +rver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 30) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 30) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 31) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: Traceback (most recent call last): (RANK 25) +[rank12]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 29) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 26) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 30) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank18]: Traceback (most recent call last): (RANK 31) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File 
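+
The mismatch above is consistent with the sweep reusing one shared checkpoint directory across context lengths: the position-embedding table was saved by the earlier ctx_length=12288 run ([12288, 4096]), while this run builds a [16384, 4096] table from --max-position-embeddings 16384, so Megatron's shape validation rejects the load before any tensor data is read. Below is a minimal conceptual sketch of the check that fires; validate_global_shape is a hypothetical stand-in for _validate_global_shapes in megatron/core/dist_checkpointing/strategies/torch.py, and it raises RuntimeError where the real code raises CheckpointingException.

import torch

def validate_global_shape(key, expected_shape, loaded_shape):
    # Conceptual mirror of Megatron's check: the global shape recorded in the
    # checkpoint metadata must match the shape the current model expects.
    if tuple(expected_shape) != tuple(loaded_shape):
        raise RuntimeError(
            f"Global shape mismatch for loaded ({loaded_shape}) and "
            f"expected ({tuple(expected_shape)}) tensor for key {key}"
        )

# The failing case from this log: a table saved by the ctx_length=12288 run
# versus the 16384-row table the current configuration expects.
validate_global_shape(
    "embedding.position_embeddings.weight",
    expected_shape=(16384, 4096),            # max-position-embeddings x hidden-size
    loaded_shape=torch.Size([12288, 4096]),  # shape recorded in the stale checkpoint
)
+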
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: raise CheckpointingException(_msg) +[rank19]:[W621 21:08:19.361865173 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. 
For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank12]: Traceback (most recent call last): (RANK 27) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 31) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 28) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 30) +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 29) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 31) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 30) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) 
+[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 31) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([12288, 4096])) and expected ((16384, 4096)) tensor for key embedding.position_embeddings.weight + +[rank21]:[W621 21:08:19.415839098 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank8]:[W621 21:08:19.741957340 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank6]:[W621 21:08:19.904388406 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank26]:[W621 21:08:19.110004953 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank9]:[W621 21:08:19.850076904 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank29]:[W621 21:08:19.179893602 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank30]:[W621 21:08:19.188471287 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. 
For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank2]:[W621 21:08:19.010711136 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank31]:[W621 21:08:19.240299647 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank0]:[W621 21:08:19.040897009 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank4]:[W621 21:08:19.042677375 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank1]:[W621 21:08:19.051774026 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank10]:[W621 21:08:19.938782100 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank12]:[W621 21:08:19.951383535 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank3]:[W621 21:08:19.077955927 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank11]:[W621 21:08:19.964172345 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank14]:[W621 21:08:19.966627268 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank13]:[W621 21:08:19.968075151 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank15]:[W621 21:08:19.978162421 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank25]:[W621 21:08:19.304060415 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. 
For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank28]:[W621 21:08:19.304685607 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank5]:[W621 21:08:19.103158048 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank27]:[W621 21:08:19.327570142 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank20]:[W621 21:08:19.722738324 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank24]:[W621 21:08:19.337935567 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank16]:[W621 21:08:19.806222176 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank7]:[W621 21:08:19.223012742 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank23]:[W621 21:08:19.850031383 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank17]:[W621 21:08:19.873158197 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank22]:[W621 21:08:20.908385445 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank18]:[W621 21:08:20.072097291 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. 
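+
These warnings are a side effect of the crash (the workers die before any cleanup runs), but the fix the message points to is an explicit teardown of the default process group before the interpreter exits. A minimal sketch, assuming a NCCL-initialized entry point like the one in pretrain_gpt_profile.py; run_training is a placeholder, not a function from the source.

import torch.distributed as dist

def run_training():
    ...  # placeholder: the actual pretrain / checkpoint-load logic goes here

def main():
    dist.init_process_group(backend="nccl")
    try:
        run_training()
    finally:
        # Explicitly destroy the default process group so NCCL resources are
        # released and this warning is not emitted on exit.
        if dist.is_initialized():
            dist.destroy_process_group()
+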
+W0621 21:08:20.504000 1960889 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1960960 closing signal SIGTERM
+W0621 21:08:20.506000 1960889 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1960962 closing signal SIGTERM
+W0621 21:08:20.507000 1960889 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1960963 closing signal SIGTERM
+W0621 21:08:20.507000 1960889 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1960964 closing signal SIGTERM
+W0621 21:08:20.507000 1960889 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1960965 closing signal SIGTERM
+W0621 21:08:20.508000 1960889 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1960966 closing signal SIGTERM
+W0621 21:08:20.508000 1960889 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1960967 closing signal SIGTERM
+W0621 21:08:20.519000 2156937 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2157007 closing signal SIGTERM
+W0621 21:08:20.520000 2156937 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2157008 closing signal SIGTERM
+W0621 21:08:20.521000 2156937 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2157009 closing signal SIGTERM
+W0621 21:08:20.521000 2156937 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2157010 closing signal SIGTERM
+W0621 21:08:20.522000 2156937 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2157011 closing signal SIGTERM
+W0621 21:08:20.522000 2156937 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2157012 closing signal SIGTERM
+W0621 21:08:20.523000 2156937 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2157014 closing signal SIGTERM
+W0621 21:08:20.636000 1679871 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1679941 closing signal SIGTERM
+W0621 21:08:20.637000 1679871 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1679942 closing signal SIGTERM
+W0621 21:08:20.637000 1679871 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1679943 closing signal SIGTERM
+W0621 21:08:20.638000 1679871 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1679945 closing signal SIGTERM
+W0621 21:08:20.638000 1679871 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1679946 closing signal SIGTERM
+W0621 21:08:20.638000 1679871 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1679947 closing signal SIGTERM
+W0621 21:08:20.639000 1679871 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 1679948 closing signal SIGTERM
+W0621 21:08:20.716000 697279 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 697352 closing signal SIGTERM
+W0621 21:08:20.716000 697279 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 697353 closing signal SIGTERM
+W0621 21:08:20.717000 697279 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 697354 closing signal SIGTERM
+W0621 21:08:20.717000 697279 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 697355 closing signal SIGTERM
+W0621 21:08:20.718000 697279 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 697356 closing signal SIGTERM
+W0621 21:08:20.718000 697279 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 697357 closing signal SIGTERM
+W0621 21:08:20.719000 697279 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 697359 closing signal SIGTERM
+E0621 21:08:21.161000 2156937 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 6 (pid: 2157013) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+E0621 21:08:21.200000 1960889 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 1 (pid: 1960961) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+[... each failing agent printed the identical torch.distributed.launch traceback below, interleaved in the original stream; only the per-agent root-cause summaries differ ...]
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent
+    raise ChildFailedError(
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time      : 2025-06-21_21:08:20
+  host      : fs-mbz-gpu-768
+  rank      : 30 (local_rank: 6)
+  exitcode  : 1 (pid: 2157013)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time      : 2025-06-21_21:08:20
+  host      : fs-mbz-gpu-702
+  rank      : 9 (local_rank: 1)
+  exitcode  : 1 (pid: 1960961)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
+E0621 21:08:21.331000 1679871 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 3 (pid: 1679944) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time      : 2025-06-21_21:08:20
+  host      : fs-mbz-gpu-717
+  rank      : 19 (local_rank: 3)
+  exitcode  : 1 (pid: 1679944)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
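+
The error_file: <N/A> fields above are why these root-cause entries carry no traceback; the linked elastic errors page documents the @record decorator, which writes each worker's exception to an error file that the agent then surfaces in this summary. A minimal sketch, assuming the script's entry point is a main() function (the real entry point in ./pretrain_gpt_profile.py may be structured differently):

from torch.distributed.elastic.multiprocessing.errors import record

@record
def main():
    ...  # the existing pretrain() call would go here

if __name__ == "__main__":
    main()
+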
+E0621 21:08:21.331000 1679871 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 3 (pid: 1679944) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent
+    raise ChildFailedError(
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time : 2025-06-21_21:08:20
+  host : fs-mbz-gpu-717
+  rank : 19 (local_rank: 3)
+  exitcode : 1 (pid: 1679944)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
++ set +x
+E0621 21:08:21.511000 697279 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 6 (pid: 697358) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
++ set +x
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent
+    raise ChildFailedError(
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time : 2025-06-21_21:08:20
+  host : fs-mbz-gpu-600
+  rank : 6 (local_rank: 6)
+  exitcode : 1 (pid: 697358)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
++ set +x
++ set +x
++ for ctx_length in 1024 2048 4096 8192 12288 16384 24576 32768 40960 49152 65536 81920 98304 131072
++ export PROF_CTX_LENGTH=24576
++ PROF_CTX_LENGTH=24576
++ name='/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L24576*tp8.cp4.bs16.json'
++ '[' -f '/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L24576*tp8.cp4.bs16.json' ']'
++ echo 'Running ctx_length=24576, TP_SIZE=8, CP_SIZE=4, BATCH_SIZE=16'
++ srun bash ./attnserver.sh
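One detail worth noting in the xtrace above: the existence test quotes the glob (`'[' -f '...mytrace.L24576*tp8.cp4.bs16.json' ']'`), so it checks for a file whose name literally contains `*`. If this guard is meant to skip a ctx_length whose trace already exists, it never fires, because the pattern is never expanded. A pattern-aware version of that check, sketched in Python since the sweep script itself is not part of this log:

    import glob

    # Hypothetical reimplementation of the skip-if-trace-exists guard.
    # The shell form tests for a literal "*" in the filename; expanding
    # the pattern first gives the apparent intent.
    pattern = "/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L24576*tp8.cp4.bs16.json"

    if glob.glob(pattern):
        print("trace already exists, skipping ctx_length=24576")
    else:
        print("Running ctx_length=24576, TP_SIZE=8, CP_SIZE=4, BATCH_SIZE=16")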
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 0 --rdzv_id 343199 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-600:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 24576 --max-position-embeddings 24576 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 1 --rdzv_id 343199 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-600:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 24576 --max-position-embeddings 24576 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 3 --rdzv_id 343199 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-600:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 24576 --max-position-embeddings 24576 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 2 --rdzv_id 343199 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-600:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 24576 --max-position-embeddings 24576 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
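All four node commands still go through the deprecated `python3 -m torch.distributed.launch`, which is what produces the FutureWarning blocks that follow. The change those warnings ask for is to launch with torchrun and read the local rank from the environment instead of a `--local-rank` argument; a minimal sketch of the worker-side half:

    import os

    import torch

    # torchrun (and launch with --use-env) exports LOCAL_RANK for each
    # worker; reading it replaces the injected --local-rank argument.
    local_rank = int(os.environ["LOCAL_RANK"])
    torch.cuda.set_device(local_rank)  # bind this process to its GPU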
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:08:25.347000 1962734 site-packages/torch/distributed/run.py:766]
+W0621 21:08:25.347000 1962734 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:08:25.347000 1962734 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:08:25.347000 1962734 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:08:25.347000 2158764 site-packages/torch/distributed/run.py:766]
+W0621 21:08:25.347000 2158764 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:08:25.347000 2158764 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:08:25.347000 2158764 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:08:25.363000 699177 site-packages/torch/distributed/run.py:766]
+W0621 21:08:25.363000 699177 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:08:25.363000 699177 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:08:25.363000 699177 site-packages/torch/distributed/run.py:766] *****************************************
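Each launcher also notes that it defaults OMP_NUM_THREADS to 1 per worker and leaves tuning to the user. Since run.py only injects that value when the variable is unset, a tuned value exported in the job script before launching reaches every worker; a sketch of mirroring it into torch's intra-op thread pool (the handling here is illustrative, not taken from this log):

    import os

    import torch

    # torch.distributed.run sets OMP_NUM_THREADS=1 only when the variable
    # is not already set, so an exported value survives into the workers.
    num_threads = int(os.environ.get("OMP_NUM_THREADS", "1"))
    torch.set_num_threads(num_threads)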
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:08:25.370000 1681715 site-packages/torch/distributed/run.py:766]
+W0621 21:08:25.370000 1681715 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:08:25.370000 1681715 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:08:25.370000 1681715 site-packages/torch/distributed/run.py:766] *****************************************
+[rank24]:[W621 21:08:49.066840513 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 24] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank16]:[W621 21:08:49.459582641 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 16] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank0]:[W621 21:08:49.905891485 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 0] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank8]:[W621 21:08:49.042118245 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 8] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank11]:[W621 21:08:49.046999454 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 11] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank19]:[W621 21:08:49.758444392 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 19] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank12]:[W621 21:08:49.048704313 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 12] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank20]:[W621 21:08:49.759225809 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 20] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank28]:[W621 21:08:49.371914569 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 28] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank27]:[W621 21:08:49.372247948 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 27] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank15]:[W621 21:08:49.057731705 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 15] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank31]:[W621 21:08:49.381037430 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 31] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank23]:[W621 21:08:49.768625830 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 23] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank17]:[W621 21:08:49.770201263 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 17] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank9]:[W621 21:08:49.060429489 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 9] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank25]:[W621 21:08:49.386614619 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 25] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank21]:[W621 21:08:49.775577267 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 21] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank29]:[W621 21:08:49.388197251 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 29] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank22]:[W621 21:08:49.778388202 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 22] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank14]:[W621 21:08:49.068181487 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 14] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank13]:[W621 21:08:49.068330904 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 13] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank18]:[W621 21:08:49.779702824 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 18] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank10]:[W621 21:08:49.071545568 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 10] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank30]:[W621 21:08:49.394548014 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 30] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank26]:[W621 21:08:49.401797023 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 26] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank3]:[W621 21:08:49.219445398 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 3] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank4]:[W621 21:08:49.219449020 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 4] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank2]:[W621 21:08:49.219493182 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 2] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank1]:[W621 21:08:49.219530725 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 1] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank6]:[W621 21:08:49.219563224 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 6] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank5]:[W621 21:08:49.219595352 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 5] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank7]:[W621 21:08:49.219672992 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 7] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
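Every rank prints the same ProcessGroupNCCL warning because init_process_group() ran before the process was bound to its CUDA device, so NCCL has to guess. A minimal sketch of the fix the warning itself suggests, assuming LOCAL_RANK comes from the launcher environment as in the runs above (`device_id` is accepted by recent PyTorch releases):

    import os

    import torch
    import torch.distributed as dist

    # RANK, WORLD_SIZE, and the rendezvous endpoint are assumed to be
    # provided by the launcher, as in the commands earlier in this log.
    local_rank = int(os.environ["LOCAL_RANK"])
    torch.cuda.set_device(local_rank)
    dist.init_process_group(
        backend="nccl",
        device_id=torch.device(f"cuda:{local_rank}"),
    )

Binding the device first, and passing it explicitly, removes both the guess and the potential hang the warning describes.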