diff --git "a/attnserver.run_attnserver.slurm.sh.343200.err.log" "b/attnserver.run_attnserver.slurm.sh.343200.err.log"
--- "a/attnserver.run_attnserver.slurm.sh.343200.err.log"
+++ "b/attnserver.run_attnserver.slurm.sh.343200.err.log"
@@ -30631,3 +30631,18489 @@ W0621 21:16:49.881000 3363945 site-packages/torch/distributed/run.py:766] ******
  warnings.warn(
/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly.
  warnings.warn(
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect.
+  warnings.warn(
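The transformer_engine notice above is emitted once per process, so a multi-rank job prints it dozens of times. If the deprecated flag truly has no effect, one way to quiet the log is a warning filter in the training entrypoint before the heavy imports run; a minimal sketch using the standard-library warnings module (placing it in pretrain_gpt_profile.py is an assumption about where imports happen first):

import warnings

# Drop the repeated transformer_engine deprecation notice. The message
# pattern is copied from the log lines above; "module" is a regex matched
# against the module that raises the warning.
warnings.filterwarnings(
    "ignore",
    message=r"Offloading weights is deprecated.*",
    category=DeprecationWarning,
    module=r"transformer_engine\.pytorch\.cpu_offload",
)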
+[rank2]: Traceback (most recent call last):
+[rank2]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank2]:     pretrain(
+[rank2]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain
+[rank2]:     iteration, num_floating_point_operations_so_far = train(
+[rank2]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train
+[rank2]:     ) = train_step(
+[rank2]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step
+[rank2]:     losses_reduced = forward_backward_func(
+[rank2]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining
+[rank2]:     output_tensor, num_tokens = forward_step(
+[rank2]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step
+[rank2]:     output_tensor, loss_func = forward_step_func(data_iterator, model)
+[rank2]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step
+[rank2]:     (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator)
+[rank2]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch
+[rank2]:     batch = next(global_batches)
+[rank2]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches
+[rank2]:     attention_mask = torch.ones(
+[rank2]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 2 has a total capacity of 139.81 GiB of which 133.17 GiB is free. Including non-PyTorch memory, this process has 6.63 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
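Every rank that reached the error line died on the same allocation: the dense attention mask built by torch.ones in setup_batches. A 204800.00 GiB request is the signature of a mask that is quadratic in sequence length. A back-of-envelope check (assuming a 1-byte bool mask of shape (batch, 1, S, S); the actual shape and dtype in setup_batches are not visible in this log):

def dense_mask_gib(batch: int, seq_len: int, bytes_per_elem: int = 1) -> float:
    """Memory needed for a (batch, 1, seq_len, seq_len) attention mask, in GiB."""
    return batch * seq_len * seq_len * bytes_per_elem / 2**30

print(dense_mask_gib(1, 131_072))     # 128Ki tokens -> 16.0 GiB, already large
print(dense_mask_gib(1, 14_800_000))  # ~2.0e5 GiB, the order of the failed request

Under those assumptions the failed request corresponds to a sequence length around 1.5e7 tokens (sqrt(204800 * 2**30) ≈ 1.48e7). At that scale no allocator setting helps: the PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True suggestion in the message only mitigates fragmentation, and 133 GiB of free memory cannot satisfy a ~200 TiB request. The fix is to avoid materializing the dense mask, e.g. by using an attention path that applies causal masking implicitly.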
+[rank6]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 6 has a total capacity of 139.81 GiB of which 133.17 GiB is free. Including non-PyTorch memory, this process has 6.63 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank3]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 3 has a total capacity of 139.81 GiB of which 133.16 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank7]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 7 has a total capacity of 139.81 GiB of which 133.16 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank5]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 5 has a total capacity of 139.81 GiB of which 133.16 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank0]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 0 has a total capacity of 139.81 GiB of which 133.17 GiB is free. Including non-PyTorch memory, this process has 6.63 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank13]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 5 has a total capacity of 139.81 GiB of which 133.15 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank1]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 1 has a total capacity of 139.81 GiB of which 133.16 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank9]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 1 has a total capacity of 139.81 GiB of which 133.15 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank4]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 4 has a total capacity of 139.81 GiB of which 133.17 GiB is free. Including non-PyTorch memory, this process has 6.63 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank25]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 1 has a total capacity of 139.81 GiB of which 133.17 GiB is free. Including non-PyTorch memory, this process has 6.63 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank22]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 6 has a total capacity of 139.81 GiB of which 133.15 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank30]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 6 has a total capacity of 139.81 GiB of which 133.16 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank12]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 4 has a total capacity of 139.81 GiB of which 133.14 GiB is free. Including non-PyTorch memory, this process has 6.67 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank18]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 2 has a total capacity of 139.81 GiB of which 133.15 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank26]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 2 has a total capacity of 139.81 GiB of which 133.16 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank19]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 3 has a total capacity of 139.81 GiB of which 133.14 GiB is free. Including non-PyTorch memory, this process has 6.67 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank27]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 3 has a total capacity of 139.81 GiB of which 133.17 GiB is free. Including non-PyTorch memory, this process has 6.63 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank11]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 3 has a total capacity of 139.81 GiB of which 133.15 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank23]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 7 has a total capacity of 139.81 GiB of which 133.14 GiB is free. Including non-PyTorch memory, this process has 6.67 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank31]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 7 has a total capacity of 139.81 GiB of which 133.17 GiB is free. Including non-PyTorch memory, this process has 6.63 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank15]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 7 has a total capacity of 139.81 GiB of which 133.15 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+[rank17]: Traceback (most recent call last):
+[rank24]: Traceback (most recent call last):
+[rank8]: Traceback (most recent call last):
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank8]: losses_reduced = forward_backward_func( +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank17]: batch = next(global_batches) +[rank17]: ^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank17]: attention_mask = torch.ones( +[rank17]: ^^^^^^^^^^^ +[rank17]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 1 has a total capacity of 139.81 GiB of which 133.14 GiB is free. Including non-PyTorch memory, this process has 6.67 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank24]: output_tensor, num_tokens = forward_step( +[rank24]: ^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank24]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank24]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: output_tensor, num_tokens = forward_step( +[rank8]: ^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank21]: Traceback (most recent call last): +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank21]: pretrain( +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank21]: iteration, num_floating_point_operations_so_far = train( +[rank21]: ^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank21]: ) = train_step( +[rank21]: ^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank21]: losses_reduced = forward_backward_func( +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank24]: batch = next(global_batches) +[rank24]: ^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank24]: attention_mask = torch.ones( +[rank24]: ^^^^^^^^^^^ +[rank24]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. 
GPU 0 has a total capacity of 139.81 GiB of which 133.16 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank8]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank8]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank8]: batch = next(global_batches) +[rank8]: ^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank8]: attention_mask = torch.ones( +[rank8]: ^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank21]: output_tensor, num_tokens = forward_step( +[rank21]: ^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank21]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank21]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: Traceback (most recent call last): +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank29]: pretrain( +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank29]: iteration, num_floating_point_operations_so_far = train( +[rank29]: ^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank29]: ) = train_step( +[rank29]: ^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank29]: losses_reduced = forward_backward_func( +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 0 has a total capacity of 139.81 GiB of which 133.14 GiB is free. Including non-PyTorch memory, this process has 6.67 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank21]: batch = next(global_batches) +[rank21]: ^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank21]: attention_mask = torch.ones( +[rank21]: ^^^^^^^^^^^ +[rank21]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 5 has a total capacity of 139.81 GiB of which 133.14 GiB is free. Including non-PyTorch memory, this process has 6.67 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank29]: output_tensor, num_tokens = forward_step( +[rank29]: ^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank29]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank29]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: Traceback (most recent call last): +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank10]: pretrain( +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank10]: iteration, num_floating_point_operations_so_far = train( +[rank10]: ^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank10]: ) = train_step( +[rank10]: ^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank10]: losses_reduced = forward_backward_func( +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: Traceback (most recent call last): +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank20]: pretrain( +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank20]: iteration, num_floating_point_operations_so_far = train( +[rank20]: ^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank20]: ) = train_step( +[rank20]: ^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank20]: losses_reduced = forward_backward_func( +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank29]: batch = next(global_batches) +[rank29]: 
^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank29]: attention_mask = torch.ones( +[rank29]: ^^^^^^^^^^^ +[rank29]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 5 has a total capacity of 139.81 GiB of which 133.17 GiB is free. Including non-PyTorch memory, this process has 6.63 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank10]: output_tensor, num_tokens = forward_step( +[rank10]: ^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank10]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank10]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank20]: output_tensor, num_tokens = forward_step( +[rank20]: ^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank20]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank20]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: Traceback (most recent call last): +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank28]: pretrain( +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank28]: iteration, num_floating_point_operations_so_far = train( +[rank28]: ^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank28]: ) = train_step( +[rank28]: ^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank28]: losses_reduced = forward_backward_func( +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank10]: batch = next(global_batches) +[rank10]: ^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank10]: attention_mask = torch.ones( +[rank10]: ^^^^^^^^^^^ +[rank10]: torch.OutOfMemoryError: CUDA out of memory. 
Tried to allocate 204800.00 GiB. GPU 2 has a total capacity of 139.81 GiB of which 133.14 GiB is free. Including non-PyTorch memory, this process has 6.67 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank20]: batch = next(global_batches) +[rank20]: ^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank20]: attention_mask = torch.ones( +[rank20]: ^^^^^^^^^^^ +[rank20]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 4 has a total capacity of 139.81 GiB of which 133.15 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank28]: output_tensor, num_tokens = forward_step( +[rank28]: ^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank28]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank28]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: Traceback (most recent call last): +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank14]: pretrain( +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank14]: iteration, num_floating_point_operations_so_far = train( +[rank14]: ^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank14]: ) = train_step( +[rank14]: ^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank14]: losses_reduced = forward_backward_func( +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: Traceback (most recent call last): +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank16]: pretrain( +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 863, in pretrain +[rank16]: iteration, num_floating_point_operations_so_far = train( +[rank16]: ^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 2229, in train +[rank16]: ) = 
train_step( +[rank16]: ^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1382, in train_step +[rank16]: losses_reduced = forward_backward_func( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank28]: batch = next(global_batches) +[rank28]: ^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank28]: attention_mask = torch.ones( +[rank28]: ^^^^^^^^^^^ +[rank28]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 4 has a total capacity of 139.81 GiB of which 133.16 GiB is free. Including non-PyTorch memory, this process has 6.65 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank14]: output_tensor, num_tokens = forward_step( +[rank14]: ^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank14]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank14]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 518, in forward_backward_no_pipelining +[rank16]: output_tensor, num_tokens = forward_step( +[rank16]: ^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/pipeline_parallel/schedules.py", line 289, in forward_step +[rank16]: output_tensor, loss_func = forward_step_func(data_iterator, model) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 446, in forward_step +[rank16]: (tokens, labels, loss_mask, attention_mask, position_ids), token_lens = get_batch(data_iterator) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 284, in get_batch +[rank14]: batch = next(global_batches) +[rank14]: ^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 226, in setup_batches +[rank14]: attention_mask = torch.ones( +[rank14]: ^^^^^^^^^^^ +[rank14]: torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 204800.00 GiB. GPU 6 has a total capacity of 139.81 GiB of which 133.14 GiB is free. Including non-PyTorch memory, this process has 6.67 GiB memory in use. Of the allocated memory 4.59 GiB is allocated by PyTorch, and 559.51 MiB is reserved by PyTorch but unallocated. 
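The numbers above are internally consistent: each GPU has roughly 133 GiB free, but setup_batches requests 204800.00 GiB in a single tensor, over 1400x the card's 139.81 GiB capacity, so the PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True hint in the message cannot help; it only addresses fragmentation. A dense attention mask grows with batch times seq_len squared, which is the kind of quadratic blow-up that turns a long-context sweep into a multi-hundred-GiB request. A minimal sketch of that arithmetic (the shapes below are illustrative, not read from this job's config):

```python
import torch

def dense_mask_gib(batch: int, seq_len: int, dtype: torch.dtype = torch.bool) -> float:
    """GiB needed to materialize a (batch, 1, seq_len, seq_len) attention mask."""
    itemsize = torch.empty((), dtype=dtype).element_size()
    return batch * seq_len ** 2 * itemsize / 2 ** 30

# Quadratic growth: each doubling of context quadruples the mask.
for s in (8192, 32768, 81920):
    print(f"seq_len={s:>6}: {dense_mask_gib(32, s):>8,.1f} GiB")
# seq_len=  8192:      2.0 GiB
# seq_len= 32768:     32.0 GiB
# seq_len= 81920:    200.0 GiB
```

Fused attention paths (flash or Transformer Engine kernels, for instance) take a causal-mask flag instead of a materialized (seq, seq) tensor, which is the usual way to keep runs at these context lengths within memory.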
+[rank1]:[W621 21:17:26.308497202 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[... the same ProcessGroupNCCL shutdown warning from ranks 2-7, 9-15, 17-23, and 25-31 omitted ...]
+W0621 21:17:27.461000 144078 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 144151 closing signal SIGTERM
+[... matching closing-signal notices omitted: each of the four elastic agents (pids 144078, 3432705, 2063251, 3363945) sent SIGTERM to its surviving workers ...]
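The shutdown warning is cosmetic on this failure path, since the workers are already dying, but it flags that the trainer never tears the NCCL process group down. A minimal sketch of the conventional fix; main() here is a hypothetical stand-in for the pretrain() entry point, not code from this repository:

```python
import torch.distributed as dist

def main() -> None:
    ...  # hypothetical training entry point (pretrain() in this job)

if __name__ == "__main__":
    dist.init_process_group(backend="nccl")
    try:
        main()
    finally:
        # Explicit teardown releases NCCL communicators and silences the
        # "destroy_process_group() was not called" warning, even on errors.
        if dist.is_initialized():
            dist.destroy_process_group()
```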
+E0621 21:17:27.794000 144078 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 1 (pid: 144152) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+E0621 21:17:27.823000 3432705 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 2 (pid: 3432777) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent
+    raise ChildFailedError(
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time      : 2025-06-21_21:17:27
+  host      : fs-mbz-gpu-852
+  rank      : 1 (local_rank: 1)
+  exitcode  : 1 (pid: 144152)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
+[... the agent on fs-mbz-gpu-870 (pid 3432705) raised the same ChildFailedError through an identical launcher traceback; its summary: ...]
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+[1]:
+  time      : 2025-06-21_21:17:27
+  host      : fs-mbz-gpu-870
+  rank      : 13 (local_rank: 5)
+  exitcode  : 1 (pid: 3432780)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time      : 2025-06-21_21:17:27
+  host      : fs-mbz-gpu-870
+  rank      : 10 (local_rank: 2)
+  exitcode  : 1 (pid: 3432777)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
+E0621 21:17:27.973000 2063251 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 5 (pid: 2063327) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
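Every summary reports error_file: <N/A> and points at the elastic errors page: torchelastic only propagates a worker's traceback into this report when the worker entrypoint is wrapped with the @record decorator. A minimal sketch; train_main is a hypothetical name standing in for the script's entry function:

```python
from torch.distributed.elastic.multiprocessing.errors import record

@record
def train_main() -> None:
    ...  # hypothetical entry point wrapping pretrain_gpt_profile.py's main logic

if __name__ == "__main__":
    # With @record, an uncaught exception is written to an error file and
    # surfaces in the agent's "Root Cause" summary instead of "<N/A>".
    train_main()
```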
+W0621 21:17:27.986000 2063251 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-901_2063251_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError.
+[W621 21:17:27.426002538 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=4, addr=[fs-mbz-gpu-901]:52104, remote=[fs-mbz-gpu-852]:29500): Broken pipe
+Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x154228f785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x15421225aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baa358 (0x15421225c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5babb3e (0x15421225db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: c10d::TCPStore::doWait(c10::ArrayRef<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::chrono::duration<long, std::ratio<1l, 1000l> >) + 0x1a6 (0x154212257ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0x33 (0x154212257ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #6: c10d::TCPStore::get(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0xab (0x154212258f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #7: <unknown function> + 0xc0f526 (0x15422158b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #8: <unknown function> + 0x37f17d (0x154220cfb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+<omitting python frames>
+frame #26: <unknown function> + 0x29d90 (0x15422a27ad90 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #27: __libc_start_main + 0x80 (0x15422a27ae40 in /lib/x86_64-linux-gnu/libc.so.6)
+
+W0621 21:17:27.999000 2063251 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-901_2063251_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError.
+[... the same sendBytes / Broken pipe traceback ([W621 21:17:27.437861501]) and shutdown warning (W0621 21:17:28.009000) from this node repeated; omitted ...]
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+E0621 21:17:28.016000 3363945 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 1 (pid: 3364016) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent
+    raise ChildFailedError(
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time      : 2025-06-21_21:17:27
+  host      : fs-mbz-gpu-901
+  rank      : 29 (local_rank: 5)
+  exitcode  : 1 (pid: 2063327)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
+[W621 21:17:28.397907967 TCPStore.cpp:115] [c10d] recvVector failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-881]:53752, remote=[fs-mbz-gpu-852]:29500): failed to recv, got 0 bytes
+Exception raised from recvBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:678 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x14f0859785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x14f06e85aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baa0d0 (0x14f06e85c0d0 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5baa81d (0x14f06e85c81d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: <unknown function> + 0x5bab4a9 (0x14f06e85d4a9 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: c10d::TCPStore::compareSet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::vector<unsigned char, std::allocator<unsigned char> > const&, std::vector<unsigned char, std::allocator<unsigned char> > const&) + 0x1fb (0x14f06e8574cb in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #6: <unknown function> + 0xc0f919 (0x14f07db8b919 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #7: <unknown function> + 0x37f17d (0x14f07d2fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+<omitting python frames>
+frame #25: <unknown function> + 0x29d90 (0x14f086962d90 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #26: __libc_start_main + 0x80 (0x14f086962e40 in /lib/x86_64-linux-gnu/libc.so.6)
+
+W0621 21:17:28.038000 3363945 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-881_3363945_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError.
+[W621 21:17:28.409394959 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-881]:53752, remote=[fs-mbz-gpu-852]:29500): Broken pipe
+Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x14f0859785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x14f06e85aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baa358 (0x14f06e85c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5babb3e (0x14f06e85db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: c10d::TCPStore::compareSet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::vector<unsigned char, std::allocator<unsigned char> > const&, std::vector<unsigned char, std::allocator<unsigned char> > const&) + 0x299 (0x14f06e857569 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: <unknown function> + 0xc0f919 (0x14f07db8b919 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #6: <unknown function> + 0x37f17d (0x14f07d2fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+<omitting python frames>
+frame #24: <unknown function> + 0x29d90 (0x14f086962d90 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #25: __libc_start_main + 0x80 (0x14f086962e40 in /lib/x86_64-linux-gnu/libc.so.6)
+
+W0621 21:17:28.047000 3363945 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-881_3363945_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError.
+[W621 21:17:28.418249679 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-881]:53752, remote=[fs-mbz-gpu-852]:29500): Broken pipe
+Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x14f0859785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x14f06e85aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baa358 (0x14f06e85c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5babb3e (0x14f06e85db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: c10d::TCPStore::compareSet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::vector<unsigned char, std::allocator<unsigned char> > const&, std::vector<unsigned char, std::allocator<unsigned char> > const&) + 0x299 (0x14f06e857569 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: <unknown function> + 0xc0f919 (0x14f07db8b919 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #6: <unknown function> + 0x37f17d (0x14f07d2fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+<omitting python frames>
+frame #24: <unknown function> + 0x29d90 (0x14f086962d90 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #25: __libc_start_main + 0x80 (0x14f086962e40 in /lib/x86_64-linux-gnu/libc.so.6)
+
+W0621 21:17:28.056000 3363945 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-881_3363945_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError.
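The recv/send failures above only say that the c10d rendezvous store on fs-mbz-gpu-852:29500 went away once the job died. A minimal sketch, assuming torch is importable on a cluster node and reusing the host/port from this log, for probing whether that TCPStore is still reachable:

    from datetime import timedelta
    import torch.distributed as dist

    # Attach to the existing c10d store (is_master=False) instead of binding the port.
    store = dist.TCPStore("fs-mbz-gpu-852", 29500, is_master=False,
                          timeout=timedelta(seconds=5))
    store.set("probe", "ok")   # round-trips a key; raises once the hosting agent is gone
    print(store.get("probe"))  # b'ok' while the rendezvous is alive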
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent
+    raise ChildFailedError(
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time      : 2025-06-21_21:17:27
+  host      : fs-mbz-gpu-881
+  rank      : 17 (local_rank: 1)
+  exitcode  : 1 (pid: 3364016)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
++ set +x
++ set +x
++ set +x
++ set +x
++ for ctx_length in 1024 2048 4096 8192 12288 16384 24576 32768 40960 49152 65536 81920 98304 131072
++ export PROF_CTX_LENGTH=98304
++ PROF_CTX_LENGTH=98304
++ name='/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L98304*tp8.cp4.bs32.json'
++ '[' -f '/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L98304*tp8.cp4.bs32.json' ']'
++ echo 'Running ctx_length=98304, TP_SIZE=8, CP_SIZE=4, BATCH_SIZE=32'
++ srun bash ./attnserver.sh
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 3 --rdzv_id 343200 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 98304 --max-position-embeddings 98304 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 0 --rdzv_id 343200 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 98304 --max-position-embeddings 98304 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 2 --rdzv_id 343200 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 98304 --max-position-embeddings 98304 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 1 --rdzv_id 343200 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 98304 --max-position-embeddings 98304 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:17:31.986000 145950 site-packages/torch/distributed/run.py:766]
+W0621 21:17:31.986000 145950 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:17:31.986000 145950 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:17:31.986000 145950 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:17:31.989000 2065087 site-packages/torch/distributed/run.py:766]
+W0621 21:17:31.989000 2065087 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:17:31.989000 2065087 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:17:31.989000 2065087 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:17:32.031000 3434539 site-packages/torch/distributed/run.py:766]
+W0621 21:17:32.031000 3434539 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:17:32.031000 3434539 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:17:32.031000 3434539 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:17:32.059000 3365780 site-packages/torch/distributed/run.py:766]
+W0621 21:17:32.059000 3365780 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:17:32.059000 3365780 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:17:32.059000 3365780 site-packages/torch/distributed/run.py:766] *****************************************
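The FutureWarning repeated once per node above is the launcher-side nudge toward torchrun; the --rdzv_* flags in the commands above carry over unchanged, and the worker-side change it asks for is reading LOCAL_RANK from the environment instead of parsing a --local-rank argument. A minimal sketch of that worker-side change (an illustrative snippet, not taken from pretrain_gpt_profile.py):

    import os
    import torch

    # torchrun (and torch.distributed.launch with --use-env) exports these per worker.
    local_rank = int(os.environ["LOCAL_RANK"])
    world_size = int(os.environ.get("WORLD_SIZE", "1"))
    torch.cuda.set_device(local_rank)  # replaces wiring up a --local-rank argparse flag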
+[rank0]:[W621 21:17:54.613794938 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 0] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank24]:[W621 21:17:54.177146529 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 24] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank16]:[W621 21:17:54.161074134 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 16] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank8]:[W621 21:17:54.719589083 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 8] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank6]:[W621 21:17:54.843656378 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 6] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank14]:[W621 21:17:54.730688088 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 14] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank30]:[W621 21:17:54.252403974 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 30] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank22]:[W621 21:17:54.184645852 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 22] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank7]:[W621 21:17:54.850852963 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 7] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank31]:[W621 21:17:54.259406218 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 31] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank25]:[W621 21:17:54.260664348 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 25] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank29]:[W621 21:17:54.261332729 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 29] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank1]:[W621 21:17:54.853269842 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 1] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank15]:[W621 21:17:54.740678181 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 15] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank17]:[W621 21:17:54.194215282 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 17] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank5]:[W621 21:17:54.855507473 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 5] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank2]:[W621 21:17:54.857084267 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 2] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank26]:[W621 21:17:54.265107198 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 26] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank13]:[W621 21:17:54.743265496 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 13] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank18]:[W621 21:17:54.196627202 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 18] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank21]:[W621 21:17:54.196954853 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 21] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank3]:[W621 21:17:54.858784679 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 3] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank28]:[W621 21:17:54.267042263 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 28] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank9]:[W621 21:17:54.745802403 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 9] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank27]:[W621 21:17:54.267265509 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 27] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank12]:[W621 21:17:54.745956386 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 12] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank19]:[W621 21:17:54.199397984 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 19] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank11]:[W621 21:17:54.746645650 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 11] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank4]:[W621 21:17:54.860325812 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 4] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank20]:[W621 21:17:54.200533101 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 20] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank10]:[W621 21:17:54.747848397 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 10] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank23]:[W621 21:17:54.200919268 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 23] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly.
+  warnings.warn(
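Every rank above trips the same ProcessGroupNCCL warning because the process group is created before each process is bound to its GPU. A minimal sketch of the fix the warning itself suggests (names are illustrative, not taken from the training script):

    import os
    import torch
    import torch.distributed as dist

    device = torch.device(f"cuda:{int(os.environ['LOCAL_RANK'])}")
    torch.cuda.set_device(device)
    # Passing device_id pins the rank->GPU mapping up front and silences the warning.
    dist.init_process_group(backend="nccl", device_id=device)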
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect.
+  warnings.warn(
+/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead.
+  checkpoint.load_state_dict(
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor.
+  device = getattr(value, "device", None)
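The pair of FutureWarnings above come from Megatron's torch checkpoint strategy still calling the deprecated torch.distributed.checkpoint.load_state_dict entry point. A minimal sketch of the replacement the warning points at, with a stand-in module instead of the real sharded model ("gpt-checkpoint" mirrors the --load directory used by this job):

    import torch
    import torch.distributed.checkpoint as dcp

    model = torch.nn.Linear(4, 4)          # stand-in for the real (sharded) model
    state_dict = model.state_dict()
    # dcp.load(...) is the supported spelling of the deprecated load_state_dict(...).
    dcp.load(state_dict, checkpoint_id="gpt-checkpoint")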
+[rank6]: Traceback (most recent call last):
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank6]:     pretrain(
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
+[rank6]:     model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
+[rank6]:                                             ^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
+[rank6]:     args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
+[rank6]:                                                                 ^^^^^^^^^^^^^^^^
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
+[rank6]:     state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
+[rank20]: Traceback (most recent call last):
+[rank20]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank20]:     pretrain(
+[rank20]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
+[rank20]:     model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
+[rank20]:                                             ^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank20]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
+[rank20]:     args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
+[rank20]:                                                                 ^^^^^^^^^^^^^^^^
+[rank20]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
+[rank20]:     state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
+[rank6]:                                                        ^^^^^^^^^^^^^^^^^^^^^^
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
+[rank6]:     return _load_global_dist_base_checkpoint(
+[rank6]:            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
+[rank6]:     state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
+[rank6]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
+[rank6]:     loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
+[rank20]:                                                        ^^^^^^^^^^^^^^^^^^^^^^
+[rank20]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
+[rank20]:     return _load_global_dist_base_checkpoint(
+[rank20]:            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank20]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
+[rank20]:     state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
+[rank20]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank20]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
+[rank20]:     loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
+[rank6]:                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank6]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load
+[rank6]:     checkpoint.load_state_dict(
+[rank6]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+[rank6]:     return arg(*args, **kwargs)
+[rank6]:            ^^^^^^^^^^^^^^^^^^^^
+[rank6]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict
+[rank6]:     return _load_state_dict(
+[rank6]:            ^^^^^^^^^^^^^^^^^
+[rank6]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict
+[rank27]: Traceback (most recent call last):
+[rank27]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank27]:     pretrain(
+[rank27]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
+[rank27]:     model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
+[rank27]:                                             ^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank27]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
+[rank27]:     args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
+[rank27]:                                                                 ^^^^^^^^^^^^^^^^
+[rank27]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
+[rank27]:     state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
+[rank14]: Traceback (most recent call last):
+[rank14]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank14]:     pretrain(
+[rank14]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
+[rank14]:     model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
+[rank14]:                                             ^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank14]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
+[rank14]:     args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
+[rank14]:                                                                 ^^^^^^^^^^^^^^^^
+[rank14]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
+[rank14]:     state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
+[rank20]:                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank20]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load
+[rank20]:     checkpoint.load_state_dict(
+[rank20]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+[rank20]:     return arg(*args, **kwargs)
+[rank20]:            ^^^^^^^^^^^^^^^^^^^^
+[rank20]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict
+[rank20]:     return _load_state_dict(
+[rank20]:            ^^^^^^^^^^^^^^^^^
+[rank20]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict
+[rank6]:     central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step)
+[rank6]:                              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank6]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter
+[rank6]:     raise result
+[rank6]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])
+[rank6]: Traceback (most recent call last): (RANK 0)
+[rank6]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank6]:     local_data = map_fun()
+[rank6]:                  ^^^^^^^^^
+[rank27]:                                                        ^^^^^^^^^^^^^^^^^^^^^^
+[rank27]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
+[rank27]:     return _load_global_dist_base_checkpoint(
+[rank27]:            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank27]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
+[rank27]:     state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
+[rank27]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank27]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
+[rank27]:     loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
+[rank14]:                                                        ^^^^^^^^^^^^^^^^^^^^^^
+[rank14]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
+[rank14]:     return _load_global_dist_base_checkpoint(
+[rank14]:            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank14]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
+[rank14]:     state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
+[rank14]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank14]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
+[rank14]:     loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
+[rank20]:     central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step)
+[rank20]:                              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank20]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter
+[rank20]:     raise result
+[rank20]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])
+[rank20]: Traceback (most recent call last): (RANK 0)
+[rank20]:   File
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank27]: checkpoint.load_state_dict( +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank27]: return arg(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank27]: return _load_state_dict( +[rank27]: ^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank14]: checkpoint.load_state_dict( +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank14]: return arg(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank14]: return _load_state_dict( +[rank14]: ^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank27]: 
[... identical "Global shape mismatch" tracebacks repeated for (RANK 1) through (RANK 9), and verbatim on ranks 14, 20, and 27 ...]
+[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: Traceback (most recent call last): (RANK 10) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 10) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: Traceback (most recent call last): (RANK 10) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 11) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, 
**kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: Traceback (most recent call last): (RANK 11) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 12) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call 
last): (RANK 11) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 11) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: Traceback (most recent call last): (RANK 12) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 13) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 13) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() 
+[rank27]: Traceback (most recent call last): (RANK 12) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: Traceback (most recent call last): (RANK 12) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 13) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape 
mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 13) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 14) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: Traceback (most recent call last): (RANK 14) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 15) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, 
in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 15) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank27]: Traceback (most recent call last): (RANK 14) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: Traceback (most recent call last): (RANK 14) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 16) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 15) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 15) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank20]: Traceback (most recent call last): (RANK 16) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes 
+[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 17) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: Traceback (most recent call last): (RANK 17) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: Traceback (most recent call last): (RANK 16) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, 
in create_local_plan +[rank14]: Traceback (most recent call last): (RANK 16) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 18) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 17) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: 
+[rank14]: Traceback (most recent call last): (RANK 17)
+[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank14]: local_data = map_fun()
+[rank14]: ^^^^^^^^^
+[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank14]: result = func(*args, **kwargs)
+[rank14]: ^^^^^^^^^^^^^^^^^^^^^
+[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank14]: local_plan = planner.create_local_plan()
+[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank14]: raise CheckpointingException(_msg)
+[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight
+[identical traceback and CheckpointingException repeated verbatim, interleaved across ranks 6, 14, 20, and 27 and tagged (RANK 17) through (RANK 30); duplicates elided]
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 30) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 30) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 31) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 31) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +[rank4]: Traceback (most recent call last): +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank4]: pretrain( +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 31) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 31) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +[rank19]: Traceback (most recent call last): +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank19]: pretrain( +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank19]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank4]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank19]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank19]: ^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank19]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank19]: return _load_global_dist_base_checkpoint( +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank4]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank4]: ^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank4]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank4]: return _load_global_dist_base_checkpoint( +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +[rank29]: Traceback (most recent call last): +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank29]: pretrain( +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank29]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +[rank8]: 
Traceback (most recent call last): +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank8]: pretrain( +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank8]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank19]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank19]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank19]: checkpoint.load_state_dict( +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank4]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank4]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank4]: checkpoint.load_state_dict( +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank29]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank29]: ^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank29]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank29]: return _load_global_dist_base_checkpoint( +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank8]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank8]: ^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank8]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint 
+[rank8]: return _load_global_dist_base_checkpoint( +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank19]: return arg(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank19]: return _load_state_dict( +[rank19]: ^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank19]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank4]: return arg(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank4]: return _load_state_dict( +[rank4]: ^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank4]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank29]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank29]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank29]: checkpoint.load_state_dict( +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank8]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank8]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank8]: checkpoint.load_state_dict( +[rank19]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank19]: raise result +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank4]: raise result +[rank4]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank4]: Traceback (most recent call last): (RANK 0) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank29]: return arg(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank29]: return _load_state_dict( +[rank29]: ^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank29]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank8]: return arg(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank8]: return _load_state_dict( +[rank8]: ^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank8]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank19]: Traceback (most recent call last): (RANK 0) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank29]: raise result +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank8]: raise result +[rank8]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 1) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank29]: Traceback (most recent call last): (RANK 0) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: Traceback (most recent call last): (RANK 0) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 1) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 1) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global 
shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 1) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 2) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 2) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 2) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 2) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 3) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 3) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: ^^^^^^^^^ +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 3) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 3) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 4) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors)
[rank4]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
[rank4]:     raise CheckpointingException(_msg)
[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight
Ranks 4, 8, 19, and 29 then repeat one aggregated sub-traceback per source rank ((RANK 4), (RANK 5), and so on), each a verbatim copy of the following and each ending in the same exception:
[rank4]: Traceback (most recent call last): (RANK 5)
[rank4]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
[rank4]:     local_data = map_fun()
[rank4]:                  ^^^^^^^^^
[rank4]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
[rank4]:     result = func(*args, **kwargs)
[rank4]:              ^^^^^^^^^^^^^^^^^^^^^
[rank4]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
[rank4]:     local_plan = planner.create_local_plan()
[rank4]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
[rank4]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
[rank4]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
[rank4]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
[rank4]:     raise CheckpointingException(_msg)
[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight
The driver-side traceback is identical on every failing rank; rank 12's copy is representative:
[rank12]: Traceback (most recent call last):
[rank12]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
[rank12]:     pretrain(
[rank12]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
[rank12]:     model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
[rank12]:                                             ^^^^^^^^^^^^^^^^^^^^^^^^^^
[rank12]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
[rank12]:     args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
[rank12]:                                                                 ^^^^^^^^^^^^^^^^
[rank12]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
[rank12]:     state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
[rank12]:                                                       ^^^^^^^^^^^^^^^^^^^^^^
[rank12]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
[rank12]:     return _load_global_dist_base_checkpoint(
[rank12]:            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[rank12]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
[rank12]:     state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
[rank12]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
[rank12]:     loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
[rank12]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load
[rank12]:     checkpoint.load_state_dict(
[rank12]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
[rank12]:     return arg(*args, **kwargs)
[rank12]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict
[rank12]:     return _load_state_dict(
[rank12]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict
[rank12]:     central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step)
[rank12]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter
[rank12]:     raise result
[rank12]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])
The exception then replays each source rank's traceback ((RANK 0), (RANK 1), and so on), every one identical to the create_local_plan failure shown above.
File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 6) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank21]: checkpoint.load_state_dict( +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank21]: return arg(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank21]: return _load_state_dict( +[rank21]: ^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank1]: return _load_global_dist_base_checkpoint( +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank1]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank1]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: Traceback (most recent call last): (RANK 9) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 7) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank21]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank1]: checkpoint.load_state_dict( +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank1]: return arg(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank1]: return _load_state_dict( +[rank1]: ^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank1]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 10) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank21]: raise result +[rank21]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank21]: Traceback (most recent call last): (RANK 0) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank1]: raise result +[rank1]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank1]: Traceback (most recent call last): (RANK 0) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank8]: raise CheckpointingException(_msg) +[rank21]: re[rank18]: Traceback (most recent call last): +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank18]: pretrain( +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank18]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank18]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank18]: ^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank1]: result = func(*args, **kwargs) +[rank1]: [rank5]: Traceback (most recent call last): +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank5]: pretrain( +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))[rank25]: Traceback (most recent call last): +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 8) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/j[rank11]: Traceback (most recent call last): +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank18]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank18]: return _load_global_dist_base_checkpoint( +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank18]: state_dict = 
dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank5]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank5]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank5]: ^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank5]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank25]: pretrain( +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank25]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank25]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank25]: ^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank25]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank11]: pretrain( +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank11]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank11]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank11]: ^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank11]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank18]: checkpoint.load_state_dict( +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank18]: return arg(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank18]: return _load_state_dict( +[rank18]: ^^^^^^^^^^^^^^^^^ +[rank5]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank5]: return _load_global_dist_base_checkpoint( +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank5]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank5]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank25]: return _load_global_dist_base_checkpoint( +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank25]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank25]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank11]: return _load_global_dist_base_checkpoint( +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank11]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank11]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank18]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank5]: checkpoint.load_state_dict( +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank5]: return arg(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^ +[rank5]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank5]: return _load_state_dict( +[rank5]: ^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank5]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank25]: checkpoint.load_state_dict( +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank25]: return arg(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank25]: return _load_state_dict( +[rank25]: ^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank11]: checkpoint.load_state_dict( +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank11]: return arg(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank11]: return _load_state_dict( +[rank11]: ^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank11]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank18]: raise result +[rank18]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank18]: Traceback (most recent call last): (RANK 0) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in 
reduce_scatter +[rank5]: raise result +[rank5]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank5]: Traceback (most recent call last): (RANK 0) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: re tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 11) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: result = func(*args, **kwargs) +[rank5]: [rank2]: Traceback (most recent call last): +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank2]: pretrain( +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank25]: raise result +[rank25]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank25]: Traceback (most recent call last): (RANK 0) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank11]: raise result +[rank11]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank11]: Traceback (most recent call last): (RANK 0) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: re[rank10]: Traceback (most recent call last): +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 12) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank2]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank2]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank2]: ^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank2]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: re[rank24]: Traceback (most recent call last): +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank24]: pretrain( +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank24]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank24]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank24]: ^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank10]: pretrain( +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank10]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank10]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank10]: ^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in 
load_checkpoint +[rank10]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank2]: return _load_global_dist_base_checkpoint( +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank2]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank2]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank24]: return _load_global_dist_base_checkpoint( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank24]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank10]: return _load_global_dist_base_checkpoint( +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank10]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank10]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank10]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 13) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank2]: checkpoint.load_state_dict( +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank2]: return arg(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank2]: return _load_state_dict( +[rank2]: ^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank2]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank24]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank24]: checkpoint.load_state_dict( +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank24]: return arg(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank24]: return _load_state_dict( +[rank24]: ^^^^^^^^^^^^^^^^^ +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank10]: checkpoint.load_state_dict( +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank10]: return arg(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^ +[rank10]: 
File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank10]: return _load_state_dict( +[rank10]: ^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointisult = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank2]: raise result +[rank2]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank2]: Traceback (most recent call last): (RANK 0) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank24]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank10]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 1) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank2]: result = func(*args, **kwargs) +[rank2]: [rank7]: Traceback (most recent call last): +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank7]: pretrain( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank24]: raise result +[rank24]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank24]: Traceback (most recent call last): (RANK 0) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank10]: raise result +[rank10]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank10]: Traceback (most recent call last): (RANK 0) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: re[rank9]: Traceback (most recent call last): +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 2) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank7]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank7]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank7]: ^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank7]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: re[rank26]: Traceback (most recent call last): +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank26]: pretrain( +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank26]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank26]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank26]: ^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank9]: pretrain( +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank9]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank9]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank9]: ^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank9]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank21]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank7]: return _load_global_dist_base_checkpoint( +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank7]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank7]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank26]: return _load_global_dist_base_checkpoint( +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", 
line 870, in _load_global_dist_base_checkpoint +[rank26]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank9]: return _load_global_dist_base_checkpoint( +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank9]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank9]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank7]: checkpoint.load_state_dict( +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank7]: return arg(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank7]: return _load_state_dict( +[rank7]: ^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank7]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank26]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank26]: 
checkpoint.load_state_dict( +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank26]: return arg(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank26]: return _load_state_dict( +[rank26]: ^^^^^^^^^^^^^^^^^ +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank9]: checkpoint.load_state_dict( +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank9]: return arg(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank9]: return _load_state_dict( +[rank9]: ^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding[rank22]: Traceback (most recent call last): +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank22]: pretrain( +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank22]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank22]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank22]: ^^^^^^^^^^^^^^^^ +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank7]: raise result +[rank7]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank7]: Traceback (most recent call last): (RANK 0) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank26]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank9]: central_plan: LoadPlan = distW.reduce_scatter("plan", 
local_step, global_step) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank22]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank22]: return _load_global_dist_base_checkpoint( +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank22]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: result = func(*args, **kwargs) +[rank7]: [rank3]: Traceback (most recent call last): +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank3]: pretrain( +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank26]: raise result +[rank26]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank26]: Traceback (most recent call last): (RANK 0) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank9]: raise result +[rank9]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank9]: Traceback (most recent call last): (RANK 0) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: [rank15]: Traceback (most recent call last): +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank22]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in 
load +[rank22]: checkpoint.load_state_dict( +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank22]: return arg(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank22]: return _load_state_dict( +[rank22]: ^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank3]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank3]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank3]: ^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank3]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: re[rank28]: Traceback (most recent call last): +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank28]: pretrain( +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank28]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank28]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank28]: ^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank15]: pretrain( +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank15]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank15]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank15]: ^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank15]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank22]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank22]: raise result +[rank22]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException 
ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank22]: Traceback (most recent call last): (RANK 0) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank3]: return _load_global_dist_base_checkpoint( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank3]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank3]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank28]: return _load_global_dist_base_checkpoint( +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank28]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank15]: return _load_global_dist_base_checkpoint( +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank15]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank15]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: re[rank17]: Traceback (most recent call last): +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank17]: pretrain( +[rank17]: File 
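The mismatched key is the learned absolute position-embedding table, which is allocated as [max_position_embeddings, hidden_size]. The checkpoint holds [8192, 4096] while the resuming job expects [98304, 4096], which points at a sequence-length / max-position-embeddings setting that changed between save and resume. A hedged illustration of that size dependency (values taken from the error message; this is not Megatron's allocation code):

    import torch

    hidden_size = 4096
    with torch.device("meta"):  # meta device: shapes only, no real memory allocated
        saved   = torch.nn.Embedding(8192,  hidden_size)  # table at save time  -> [8192, 4096]
        resumed = torch.nn.Embedding(98304, hidden_size)  # table at resume     -> [98304, 4096]
    assert saved.weight.shape != resumed.weight.shape     # exactly the mismatch reported above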
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank17]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank3]: checkpoint.load_state_dict( +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank3]: return arg(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank3]: return _load_state_dict( +[rank3]: ^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank3]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank28]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank28]: checkpoint.load_state_dict( +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank28]: return arg(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank28]: return _load_state_dict( +[rank28]: ^^^^^^^^^^^^^^^^^ +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank15]: checkpoint.load_state_dict( +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank15]: return arg(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank15]: return _load_state_dict( +[rank15]: ^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank17]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank17]: ^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank17]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank17]: return 
_load_global_dist_base_checkpoint( +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank3]: raise result +[rank3]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank3]: Traceback (most recent call last): (RANK 0) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank28]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank15]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank17]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank17]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank17]: checkpoint.load_state_dict( +[rank3]: result = func(*args, **kwargs) +[rank3]: unda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank28]: raise result +[rank28]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank28]: Traceback (most recent call last): (RANK 0) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank15]: raise result +[rank15]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank15]: Traceback (most recent call last): (RANK 0) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank17]: return arg(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank17]: return _load_state_dict( +[rank17]: ^^^^^^^^^^^^^^^^^ +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 9) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank28]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank15]: re.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 3) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank17]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank17]: raise result +[rank17]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank17]: Traceback (most recent call last): (RANK 0) +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 1) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 4) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 10) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 2) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank12]: 
File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 1) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 11) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.1 ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 5) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ 
+[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 1) +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", lin[rank13]: Traceback (most recent call last): +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank13]: pretrain( +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain 
+[rank13]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank13]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank13]: ^^^^^^^^^^^^^^^^ +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 2) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 11) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank13]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank13]: return _load_global_dist_base_checkpoint( +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank13]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, 
in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 2) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 12) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank13]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank13]: checkpoint.load_state_dict( +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank13]: return arg(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^ +[rank13]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank13]: return _load_state_dict( +[rank13]: ^^^^^^^^^^^^^^^^^ +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding[rank23]: Traceback (most recent call last): +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank23]: pretrain( +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank23]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank13]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank13]: raise result +[rank13]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank13]: Traceback (most 
recent call last): (RANK 0) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank23]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank23]: ^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank23]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank23]: return _load_global_dist_base_checkpoint( +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 3) ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: reunda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank23]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank23]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank23]: checkpoint.load_state_dict( +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) 
+[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 1) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 13) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 9) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank23]: return arg(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointisult = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 10) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank23]: return _load_state_dict( +[rank23]: ^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank23]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank23]: raise result +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 2) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 1) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank23]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank23]: Traceback (most recent call last): (RANK 0) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: re[rank16]: Traceback (most recent call last): +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank16]: pretrain( +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 2) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank16]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank16]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank16]: ^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank16]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 3) ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: ^^^^^^^^^ +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 11) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.1sult = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, 
in _load_base_checkpoint +[rank16]: return _load_global_dist_base_checkpoint( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank16]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank16]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 1) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 1) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank16]: checkpoint.load_state_dict( +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank16]: return arg(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embeddingsult = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank16]: return _load_state_dict( +[rank16]: ^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank16]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank16]: raise result +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 2) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 1) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank11]: Traceback (most recent call last): (RANK 2) +[rank16]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank16]: Traceback (most recent call last): (RANK 0) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: re.position_embeddings.weight +[rank21]: Traceback (most recent call last): 
(RANK 3) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 2) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 3) ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: ^^^^^^^^^ +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 4) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank2]: Traceback (most recent call last): (RANK 1) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 1) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embeddingsult = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 5) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 2) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 1) +[rank28]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 2) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", linsult = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 2) +[rank28]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 1) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 3) +[rank28]: ^^^^^^^^^ +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 3)e 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 6) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank17]: raise 
CheckpointingException(_msg) +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 4) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 3) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 7) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 2) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 4) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embeddingsult = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected 
((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 5) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 1) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = 
map_fun() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 5) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 8) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in 2/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tenso ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", lin.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 3) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank23]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 1) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) 
+[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 4) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 12) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 2) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 2) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: 
local_data = map_fun() +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embeddingsult = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and 
expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 13) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 1) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank7]: raise CheckpointingException(_msg) +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 5) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_c +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank22]: ^^^^^^^^^ +[rank22]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 3) ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", linsult = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank22]: raise CheckpointingException(_msg) +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 1) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 1) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 4) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 2) +[rank22]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 2) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 2) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embeddinge 605, in 
create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 5) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 6) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding[rank31]: Traceback (most recent call last): +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank31]: pretrain( +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank31]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensosult = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 7) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: 
Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 3)2/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank31]: ^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank31]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank31]: return _load_global_dist_base_checkpoint( +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 1) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank21]: local_data = map_fun() +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 12) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank31]: 
state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank31]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank31]: checkpoint.load_state_dict( +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank31]: return arg(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank31]: return _load_state_dict( +[rank31]: ^^^^^^^^^^^^^^^^^ +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 2) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 8) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in .position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 3) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 13) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank31]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank31]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank31]: raise result +[rank31]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank31]: Traceback (most recent call last): (RANK 0) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embeddingsult = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: reng/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) 
+[rank31]: (the same inner traceback repeats for RANK 1 through RANK 31, and every other rank [rank0]-[rank31] emits an identical interleaved copy, each ending in: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight)
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank18]: Traceback (most recent call last): (RANK 5) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", lin.position_embeddings.weight +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 2) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 16) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_load.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 3) +[rank23]: Traceback (most recent call last): (RANK 3) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 15) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 4) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 16) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank31]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embeddinge 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 4) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: ^^^^^^^^^ +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 6) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_load +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 7) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 5) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 4) +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank13]: Traceback (most recent call last): (RANK 5) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", linng/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 14) +[rank19]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 6) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() 
+[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 5) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: Traceback (most recent call last): (RANK 8) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in .position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 3) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 15) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 6) +[rank26]: Traceback (most recent call last): (RANK 4) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 5) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 7) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", lin.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 3) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: Traceback (most recent call last): (RANK 7) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 16) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/chece 605, in create_local_plan +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 
576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 8) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in .position_embeddings.weight +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 6) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 8) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 4) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank15]: Traceback (most recent call last): (RANK 3) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/j +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: 
File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 4) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 5) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank18]: Traceback (most recent call last): (RANK 7) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 4) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", linkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 5) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 8) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in e 605, in create_local_plan +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 5) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 17) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", liner.py", 
line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 6) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank8]: Traceback (most recent call last): (RANK 17) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 18) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 6) +[rank1]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096e 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 18) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: Traceback (most recent call last): (RANK 7) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() 
+[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 7) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 6) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 19) +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 8) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in .position_embeddings.weight +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 7) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[ransult = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank16]: Traceback (most recent call last): (RANK 3) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: ^^^^^^^^^ +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 1) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 4) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: Traceback (most recent call last): (RANK 8) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/jrs) +[rank5]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 6) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 8) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in e 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 2) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 5) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 7) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for 
key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 6) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", linwrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 7) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embeddinge 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 6) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 9) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: ^^^^^^^^^ +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in 
wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: Traceback (most recent call last): (RANK 8) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/jrs) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank15]: raise CheckpointingException(_msg) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 10) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", 
line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 6) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 8) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in e 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 7) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
+[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 7) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 6) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 8) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))wrapper 
+[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 7) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 9) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: ^^^^^^^^^ +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 9) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: Traceback (most recent call last): (RANK 8) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/jer.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper 
+[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 10) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 10) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 17) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 8) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 18) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 9) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))e 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 6) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))e 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 6) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise 
CheckpointingException(_msg) +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 10) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 7) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 19) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[ranrs) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: 
raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))e 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 7) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 8) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 6) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 
4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 6) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 11) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 7) +[rank0]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 7) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: Traceback (most recent call last): (RANK 8) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in .position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 3) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 12) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: local_data = map_fun() +[rank0]: raise CheckpointingException(_msg) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 8) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in e 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: 
Traceback (most recent call last): (RANK 4) +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 8) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 6) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 13) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 9) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 5) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointie 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 6) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: Traceback (most recent call last): (RANK 7) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", linwrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 10) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank15]: Traceback (most recent call last): (RANK 9) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: local_plan = planner.create_local_plan() +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 8) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in ])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 7) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 11) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.1k4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank29]: 
Traceback (most recent call last): (RANK 19) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 8) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 20) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: Traceback (most recent call last): (RANK 10) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 20) +[rank4]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 11) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 9) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 21) +[rank4]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: ^^^^^^^^^ +[rank29]: Traceback (most recent call last): (RANK 21) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatrowrapper +[rank24]: result = func(*args, **kwargs) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 12) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = 
map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 10) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 13) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: raiunda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 9) +[rank24]: Traceback (most recent call last): (RANK 9) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointiunda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))ng/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 14) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 
4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 9) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 10) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 10) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 15) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 11) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 10) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.1unda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 9) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 11) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.1e 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: Traceback (most recent call last): (RANK 16) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checwrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: Traceback (most recent call last): (RANK 9) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter 
+[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 6) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 9) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 10) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 10) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 7) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank22]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 11) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in 
create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: Traceback (most recent call last): (RANK 10) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: Traceback (most recent call last): (RANK 9) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 8) +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 11) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 11) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 12) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: result = func(*args, 
**kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 10) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 13) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 12) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: Traceback (most recent call last): (RANK 12) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: Traceback (most recent call last): (RANK 11) 
+[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 13) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 13) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_cunda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 12) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointiwrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointie 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 9) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper 
+[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 9) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 6) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 10) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = 
planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank25]: Traceback (most recent call last): (RANK 13) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointiwrapper +[rank26]: result = func(*args, **kwargs) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 10) +[rank13]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 7) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))2/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 9) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape 
mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 8) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in kpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: Traceback (most recent call last): (RANK 11) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.1unda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 10) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected 
((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 17) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 9) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 12) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes 
+[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 10) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))n/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 22) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 13) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank1]: ^^^^^^^^^ +[rank1]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_cwrapper +[rank10]: result = func(*args, **kwargs) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 18) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096kpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank1]: Traceback (most recent call last): (RANK 11) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.1rs) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 6) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and 
expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 23) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank10]: Traceback (most recent call last): (RANK 9) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 17) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank29]: Traceback (most recent call last): (RANK 24) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: ^^^^^^^^^ +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 7) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 11) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and 
expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 10) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 12) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: 
Traceback (most recent call last): (RANK 18) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 8) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank30]: ^^^^^^^^^ +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))ng/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 14) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096ng/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/jse CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 22) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 13) +[rank30]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 23) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointi tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 11) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in 
local_step +[rank24]: local_plan = planner.create_local_plan() +[rank15]: Traceback (most recent call last): (RANK 15) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 14) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 12) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: 
Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 16) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checwrapper +[rank11]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 15) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 24) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: ^^^^^^^^^ +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight 
+Ranks 0, 1, 2, 4, 5, 7, 9, 10, 11, 15, 17, 19, 21, 23, 24, 26, 28, and 29 each repeated the identical traceback and CheckpointingException below (each rank also echoed its peers' copies, tagged "(RANK n)"); the concurrently interleaved duplicates read, per rank:
+[rank0]: Traceback (most recent call last):
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank0]:     local_data = map_fun()
+[rank0]:                  ^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank0]:     result = func(*args, **kwargs)
+[rank0]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank0]:     local_plan = planner.create_local_plan()
+[rank0]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank0]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank0]:     raise CheckpointingException(_msg)
+[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: Traceback (most recent call last): (RANK 14) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 12) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: Traceback (most recent call last): (RANK 23) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in 
local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 24) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site tensor for key embedding.position_embeddings.weight +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 13) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise 
CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 15) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 13) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: Traceback (most recent call last): (RANK 11) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_cunda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointier.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 9) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: Traceback (most recent call last): (RANK 16) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checng/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 17) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 12) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 10) +[rank30]: Traceback (most recent call last): (RANK 14) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 18) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan 
+[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: Traceback (most recent call last): (RANK 13) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 11) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.1ank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 15) +[rank30]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 19) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[ranng/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 14) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 25) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 26) +[rank30]: Traceback (most recent call last): (RANK 16) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checng/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 14) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: Traceback (most recent call last): (RANK 14) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 15) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: Traceback (most recent call last): (RANK 15) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 27) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnheckpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) 
+[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 16) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checwrapper +[rank16]: result = func(*args, **kwargs) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 14) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 15) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 16) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/chec])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback 
(most recent call last): (RANK 19) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 15) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: Traceback (most recent call last): (RANK 9) +[rank0]: ^^^^^^^^^ +[rank25]: Traceback (most recent call last): (RANK 16) +[rank25]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checr loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 27) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank29]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 20) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 10) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 16) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loadheckpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 21) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 14) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: Traceback (most recent call last): (RANK 28) +[rank29]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatrong/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))ng/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 14) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 15) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 29) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: raise CheckpointingException(_msg) +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hakpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 14) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 17) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 15) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank22]: Traceback (most recent call last): (RANK 15) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 16) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank30]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 16) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 17) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 16) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/chec tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 11) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: Traceback (most recent call last): (RANK 18) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 17) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank2]: raise CheckpointingException(_msg) +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096kpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 18) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 18) +[rank2]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 17) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 12) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 19) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 13) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096kpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[ranheckpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 14) +[rank7]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointikpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: raise CheckpointingException(_msg) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 15) +[rank24]: Traceback (most recent call last): (RANK 18) +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 17) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 17) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096ng/strategies/torch.py", line 605, in 
create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 18) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: Traceback (most recent call last): (RANK 14) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: 
^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 18) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 16) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 15) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096ng/strategies/torch.py", line 605, in create_local_plan +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loadheckpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 14) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in 
wrapper +[rank1]: result = func(*args, **kwargs) +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 16) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 14) +[rank12]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 25) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: Traceback (most recent call last): (RANK 15) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 17) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected 
((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 15) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 26) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ 
+[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 16) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 16) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/check9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch fo])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 19) +[rank21]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_load2/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 18) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 12) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: 
local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 19) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 20) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: ^^^^^^^^^ +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: Traceback (most recent call last): (RANK 20) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 13) +[rank3]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: raise CheckpointingException(_msg) +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 21) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 21) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 20) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raik8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatro])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 19) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_ct/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise 
CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 21) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 28) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatrokpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 20) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 17) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: Traceback (most recent call last): (RANK 20) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 29) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 21) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 21) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: rain/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatro])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank23]: Traceback (most recent call last): (RANK 19) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: megatron.core.dist_checkpointing.core.Checkpoiner.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 17) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 18) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 22) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step 
+[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096o.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 20) +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 30) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: Traceback (most recent call last): (RANK 23) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: 
Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 24) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/sitekpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 21) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 18) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 31) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatror loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 27) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 19) +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 24) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/sitekpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback 
(most recent call last): (RANK 16) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank25]: Traceback (most recent call last): (RANK 20) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 21) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loadtingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 30) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank25]: Traceback (most recent call last): (RANK 21) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatro-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatro])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 19) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and 
expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 25) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 31) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 17) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +k0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 26) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 20) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 18) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank0]: Traceback (most recent call last): (RANK 20) +[rank0]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 21) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch foank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096o.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 21) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatror loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 27) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 25) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raik5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 26) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 30) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in 
local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 28) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 31) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for 
loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 20) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 29) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/han/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: Traceback (most recent call last): (RANK 27) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnse CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 22) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +ng/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 14) +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 21) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 22) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank25]: local_plan = planner.create_local_plan() +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: 
Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight
+[rank9]: Traceback (most recent call last): (RANK 23)
+[rank9]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank9]:     local_data = map_fun()
+[rank9]:                  ^^^^^^^^^
+[rank9]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank9]:     result = func(*args, **kwargs)
+[rank9]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank9]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank9]:     local_plan = planner.create_local_plan()
+[rank9]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank9]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank9]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank9]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank9]:     raise CheckpointingException(_msg)
+[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight
+[identical traceback and CheckpointingException repeated, interleaved, by ranks 0-3, 7-12, 16-18, 21-25, 28, and 31, tagged (RANK 9) through (RANK 31); duplicate copies elided]
recent call last): (RANK 24) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 25) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank23]: Traceback (most recent call last): (RANK 23) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 24) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 24) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rse CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 22) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: Traceback (most recent call last): (RANK 10) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 26) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/siter loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 27) +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 23) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096))n/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: 
Global shape mismatch foank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 22) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 25) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 28) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 26) +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape 
mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 24) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 23) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 29) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 25) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/han/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 24) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/siten/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 27) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mntingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 30) +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 22) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 22) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 26) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 31) +[rank8]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 23) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 27) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 23) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step 
+[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 24) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnse CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 22) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +r loaded (torch.Size([8192, 4096])) 
and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 23) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 24) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 11) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank15]: Traceback (most recent call last): (RANK 27) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 28) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 25) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = 
planner.create_local_plan() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 12) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 26) +[rank18]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 24) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 13) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 29) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 
223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: local_plan = planner.create_local_plan() +[rse CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 22) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointi-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/har loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: raise CheckpointingException(_msg) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 23) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: 
local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 27) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch foo.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 30) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank30]: Traceback (most recent call last): (RANK 25) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for 
loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 28) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step 
+[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 31) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 24) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 26) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 29) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: local_plan = planner.create_local_plan() +[rk3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch fo-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper 
+[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/ha-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 20) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in 
create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 25) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: Traceback (most recent call last): (RANK 25) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: Traceback (most recent call last): (RANK 25) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 26) +[rank17]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 26) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch fo-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 21) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 26) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch foo.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 30) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raiank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call 
last): (RANK 25) +[rank25]: Traceback (most recent call last): (RANK 27) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: Traceback (most recent call last): (RANK 25) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step 
+[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 31) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 26) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 28) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: 
local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +r loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 27) +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch fokpoint/logger.py", line 87, in wrapper +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 26) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise 
CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 27) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank25]: Traceback (most recent call last): (RANK 29) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/ha-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight
+[rank10]: Traceback (most recent call last): (RANK 28)
+[rank10]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank10]:     local_data = map_fun()
+[rank10]:                  ^^^^^^^^^
+[rank10]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank10]:     result = func(*args, **kwargs)
+[rank10]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank10]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank10]:     local_plan = planner.create_local_plan()
+[rank10]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank10]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank10]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank10]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank10]:     raise CheckpointingException(_msg)
+[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight
+[... the identical CheckpointingException traceback repeats, interleaved across ranks and verbatim except for the [rankNN] prefix and the (RANK N) annotation, for every other rank observed in the original log (ranks 0-31) ...]
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 26) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +r loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 27) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 28) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 28) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 29) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 27) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: 
raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 29) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 28) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 28) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 29) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.Checkpoin])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 19) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.Checkpoint/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ 
+[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hakpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 29) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 28) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
+[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 17) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank13]: Traceback (most recent call last): (RANK 20) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: ^^^^^^^^^ +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 21) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/ha])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback 
(most recent call last): (RANK 19) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 29) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 
20) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 18) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatrotingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 30) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() 
+[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 30) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096o.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 21) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: 
local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 30) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 31) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 31) +[rank30]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +n/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatroo.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 30) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 31) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +t/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 22) +[rank13]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 31) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 28) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +o.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 23) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 30) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +o.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: raise CheckpointingException(_msg) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 24) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 30) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 29) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 31) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 25) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.Checkpoint/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 31) +[rank17]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 28) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +o.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 30) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 26) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +r loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 27) +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch 
fo-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 31) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 29) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 19) +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 25) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 28) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: 
megatron.core.dist_checkpointing.core.Checkpoinank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 29) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 25) +[rank3]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 20) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 26) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = 
planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 26) +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 30) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 27) +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 31) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: Traceback (most recent call last): (RANK 21) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: 
local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 27) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mntingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 30) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 22) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 28) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +o.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank12]: result = func(*args, **kwargs) 
+[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 30) +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 31) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 29) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: local_plan = planner.create_local_plan() +[rank31]: Traceback (most recent call last): (RANK 23) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 31) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + 
+tingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 30) +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 24) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 30) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +n/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 22) +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 31) +[rank5]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 25) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 31) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +r loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 27) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +tingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 26) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 23) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank7]: Traceback (most recent call last): (RANK 30) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 24) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 31) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 27) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 28) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 28) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 29) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 25) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +tingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 30) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: ^^^^^^^^^ +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 26) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 30) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: 
File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 31) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 29) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 31) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 27) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 30) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +t/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +o.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 28) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 28) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 31) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 30) +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in 
create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 29) +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 29) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 31) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank3]: Traceback (most recent call last): (RANK 30) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank16]: Traceback (most recent call last): (RANK 30) +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 31) +[rank16]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 31) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([8192, 4096])) and expected ((98304, 4096)) tensor for key embedding.position_embeddings.weight + +[rank15]:[W621 21:18:12.730366017 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak 
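Every rank fails identically: the checkpoint on disk holds a position-embedding table of global shape (8192, 4096), while the current model config builds one of shape (98304, 4096). Since 98304 = 12 x 8192, the job was evidently relaunched with a 12x larger maximum sequence length (and hence position-embedding count) than the checkpoint was saved with, so load-time shape validation rejects the tensor. One quick way to confirm what a checkpoint actually stores, assuming the torch.distributed.checkpoint on-disk format that Megatron's torch dist-ckpt strategy writes (the directory below is a placeholder, not this job's real path):

    # Sketch: read the checkpoint metadata offline and compare global shapes.
    from torch.distributed.checkpoint import FileSystemReader

    CKPT_DIR = "/path/to/checkpoint/iter_NNNNNNN"   # placeholder path
    KEY = "embedding.position_embeddings.weight"
    EXPECTED = (98304, 4096)                        # max position embeddings x hidden size

    meta = FileSystemReader(CKPT_DIR).read_metadata()
    stored = tuple(meta.state_dict_metadata[KEY].size)
    print(f"{KEY}: stored={stored}, expected={EXPECTED}")

If the shapes differ, either relaunch with the sequence/position-embedding size the checkpoint was saved at, or convert the checkpoint before extending the context length.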
+[rank15]:[W621 21:18:12.730366017 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[... the identical destroy_process_group() warning was printed by each of the 32 ranks (0-31); the remaining 31 copies are elided ...]
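The warning is a symptom, not the cause: the workers died on the exception above before reaching any cleanup. Still, the pattern PyTorch recommends is to destroy the process group on every exit path; a minimal sketch of that pattern (train() is a placeholder, not this job's entry point):

    import torch.distributed as dist

    def main() -> None:
        dist.init_process_group(backend="nccl")
        try:
            train()  # placeholder for the real training loop
        finally:
            dist.destroy_process_group()  # releases NCCL state, silences this warning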
For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank22]:[W621 21:18:13.522871829 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank9]:[W621 21:18:13.112113716 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank24]:[W621 21:18:13.635204984 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank13]:[W621 21:18:13.120021984 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank8]:[W621 21:18:13.425964397 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +W0621 21:18:13.804000 145950 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 146038 closing signal SIGTERM +W0621 21:18:13.804000 145950 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 146039 closing signal SIGTERM +W0621 21:18:13.805000 145950 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 146040 closing signal SIGTERM +W0621 21:18:13.805000 145950 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 146041 closing signal SIGTERM +W0621 21:18:13.805000 145950 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 146042 closing signal SIGTERM +W0621 21:18:13.806000 145950 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 146043 closing signal SIGTERM +W0621 21:18:13.806000 145950 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 146044 closing signal SIGTERM +W0621 21:18:13.852000 3365780 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3365849 closing signal SIGTERM +W0621 21:18:13.853000 3365780 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3365850 closing signal SIGTERM +W0621 21:18:13.853000 3365780 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3365852 closing signal SIGTERM +W0621 21:18:13.854000 3365780 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3365853 closing signal SIGTERM +W0621 21:18:13.854000 3365780 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3365854 closing signal SIGTERM +W0621 21:18:13.854000 3365780 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3365855 closing signal SIGTERM +W0621 21:18:13.855000 3365780 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3365856 closing signal SIGTERM +W0621 21:18:13.931000 3434539 
site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3434609 closing signal SIGTERM +W0621 21:18:13.932000 3434539 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3434610 closing signal SIGTERM +W0621 21:18:13.933000 3434539 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3434611 closing signal SIGTERM +W0621 21:18:13.936000 2065087 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2065156 closing signal SIGTERM +W0621 21:18:13.934000 3434539 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3434613 closing signal SIGTERM +W0621 21:18:13.938000 2065087 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2065158 closing signal SIGTERM +W0621 21:18:13.934000 3434539 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3434614 closing signal SIGTERM +W0621 21:18:13.938000 2065087 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2065159 closing signal SIGTERM +W0621 21:18:13.934000 3434539 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3434615 closing signal SIGTERM +W0621 21:18:13.939000 2065087 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2065160 closing signal SIGTERM +W0621 21:18:13.935000 3434539 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3434616 closing signal SIGTERM +W0621 21:18:13.939000 2065087 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2065161 closing signal SIGTERM +W0621 21:18:13.939000 2065087 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2065162 closing signal SIGTERM +W0621 21:18:13.940000 2065087 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2065163 closing signal SIGTERM +E0621 21:18:14.371000 145950 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 7 (pid: 146045) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3 +Traceback (most recent call last): + File "", line 198, in _run_module_as_main + File "", line 88, in _run_code + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in + main() + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper + return arg(*args, **kwargs) + ^^^^^^^^^^^^^^^^^^^^ + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main + launch(args) + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch + run(args) + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run + elastic_launch( + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__ + return launch_agent(self._config, self._entrypoint, list(args)) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent + raise ChildFailedError( +torch.distributed.elastic.multiprocessing.errors.ChildFailedError: +============================================================ +./pretrain_gpt_profile.py FAILED +------------------------------------------------------------ +Failures: + +------------------------------------------------------------ +Root Cause (first observed failure): +[0]: + time : 2025-06-21_21:18:13 + host : fs-mbz-gpu-852 + rank : 7 (local_rank: 7) + exitcode : 1 (pid: 146045) + error_file: + traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html +============================================================ +E0621 21:18:14.533000 3365780 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 2 (pid: 3365851) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3 +W0621 21:18:14.542000 3365780 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-881_3365780_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError. +[W621 21:18:14.914299381 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-881]:60950, remote=[fs-mbz-gpu-852]:29500): Broken pipe +Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x14ecb05785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x14ec9985aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa358 (0x14ec9985c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +E0621 21:18:14.550000 3434539 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 3 (pid: 3434612) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3 +frame #3: + 0x5babb3e (0x14ec9985db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: c10d::TCPStore::doWait(c10::ArrayRef, std::allocator > >, std::chrono::duration >) + 0x1a6 (0x14ec99857ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string, std::allocator > const&) + 0x33 (0x14ec99857ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #6: c10d::TCPStore::get(std::__cxx11::basic_string, std::allocator > const&) + 0xab (0x14ec99858f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #7: + 0xc0f526 (0x14eca8b8b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #8: + 0x37f17d (0x14eca82fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #26: + 
0x29d90 (0x14ecb1899d90 in /lib/x86_64-linux-gnu/libc.so.6) +frame #27: __libc_start_main + 0x80 (0x14ecb1899e40 in /lib/x86_64-linux-gnu/libc.so.6) + +W0621 21:18:14.555000 3365780 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-881_3365780_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError. +W0621 21:18:14.559000 3434539 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-870_3434539_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError. +[W621 21:18:14.925706677 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-881]:60950, remote=[fs-mbz-gpu-852]:29500): Broken pipe +Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x14ecb05785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x14ec9985aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa358 (0x14ec9985c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #3: + 0x5babb3e (0x14ec9985db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: c10d::TCPStore::doWait(c10::ArrayRef, std::allocator > >, std::chrono::duration >) + 0x1a6 (0x14ec99857ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string, std::allocator > const&) + 0x33 (0x14ec99857ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #6: c10d::TCPStore::get(std::__cxx11::basic_string, std::allocator > const&) + 0xab (0x14ec99858f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #7: + 0xc0f526 (0x14eca8b8b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #8: + 0x37f17d (0x14eca82fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #26: + 0x29d90 (0x14ecb1899d90 in /lib/x86_64-linux-gnu/libc.so.6) +frame #27: __libc_start_main + 0x80 (0x14ecb1899e40 in /lib/x86_64-linux-gnu/libc.so.6) + +[W621 21:18:14.477643893 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-870]:35164, remote=[fs-mbz-gpu-852]:29500): Broken pipe +Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x14bf2bb785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x14bf14e5aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa358 (0x14bf14e5c358 in 
/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +W0621 21:18:14.564000 3365780 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-881_3365780_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError. +frame #3: + 0x5babb3e (0x14bf14e5db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: c10d::TCPStore::doWait(c10::ArrayRef, std::allocator > >, std::chrono::duration >) + 0x1a6 (0x14bf14e57ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string, std::allocator > const&) + 0x33 (0x14bf14e57ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +Traceback (most recent call last): + File "", line 198, in _run_module_as_main +frame #6: c10d::TCPStore::get(std::__cxx11::basic_string, std::allocator > const&) + 0xab (0x14bf14e58f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #7: + 0xc0f526 (0x14bf2418b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #8: + 0x37f17d (0x14bf238fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #26: + 0x29d90 (0x14bf2ce4ed90 in /lib/x86_64-linux-gnu/libc.so.6) +frame #27: __libc_start_main + 0x80 (0x14bf2ce4ee40 in /lib/x86_64-linux-gnu/libc.so.6) + + File "", line 88, in _run_code + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in +W0621 21:18:14.571000 3434539 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-870_3434539_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError. 
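The rank-by-rank ProcessGroupNCCL warnings earlier in this run all point at the same omission: the training script exits without tearing down its process group. A minimal sketch of the fix, assuming a generic entrypoint (the internals of ./pretrain_gpt_profile.py are not visible in this log, so main() here is a stand-in):

import torch.distributed as dist

def main():
    # The launcher (torch.distributed.launch / torchrun) supplies the env://
    # rendezvous variables that init_process_group reads.
    dist.init_process_group(backend="nccl")
    try:
        pass  # training loop goes here
    finally:
        # Explicit teardown is exactly what the [rankN] warnings above ask for.
        dist.destroy_process_group()

if __name__ == "__main__":
    main()

Putting destroy_process_group() in a finally block silences the warning even on the failure paths this log exercises.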
+ main() + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper + return arg(*args, **kwargs) +[W621 21:18:14.489098169 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-870]:35164, remote=[fs-mbz-gpu-852]:29500): Broken pipe +Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x14bf2bb785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x14bf14e5aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa358 (0x14bf14e5c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) + ^^^^^^^^^^^^^^^^^^^^ + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main +frame #3: + 0x5babb3e (0x14bf14e5db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: c10d::TCPStore::doWait(c10::ArrayRef, std::allocator > >, std::chrono::duration >) + 0x1a6 (0x14bf14e57ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string, std::allocator > const&) + 0x33 (0x14bf14e57ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) + launch(args) + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch +frame #6: c10d::TCPStore::get(std::__cxx11::basic_string, std::allocator > const&) + 0xab (0x14bf14e58f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #7: + 0xc0f526 (0x14bf2418b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #8: + 0x37f17d (0x14bf238fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #26: + 0x29d90 (0x14bf2ce4ed90 in /lib/x86_64-linux-gnu/libc.so.6) +frame #27: __libc_start_main + 0x80 (0x14bf2ce4ee40 in /lib/x86_64-linux-gnu/libc.so.6) + + run(args) +W0621 21:18:14.581000 3434539 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-870_3434539_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError. 
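Every Root Cause block in this log shows an empty error_file and a "To enable traceback" pointer. Per the elastic errors page the summaries link to, the usual remedy is to wrap the entrypoint with the @record decorator; a minimal sketch, with main() again standing in for the real script body:

from torch.distributed.elastic.multiprocessing.errors import record

@record
def main():
    pass  # stand-in for the training logic

if __name__ == "__main__":
    main()

With @record in place, a failing child writes its traceback to the error file that the elastic agent then surfaces in the ChildFailedError summary, instead of the bare "exitcode : 1" seen here.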
+ File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run +Traceback (most recent call last): + elastic_launch( + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__ + File "", line 198, in _run_module_as_main + File "", line 88, in _run_code + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in + return launch_agent(self._config, self._entrypoint, list(args)) + main() + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper + raise ChildFailedError( +torch.distributed.elastic.multiprocessing.errors.ChildFailedError: +============================================================ +./pretrain_gpt_profile.py FAILED +------------------------------------------------------------ +Failures: + +------------------------------------------------------------ +Root Cause (first observed failure): +[0]: + time : 2025-06-21_21:18:13 + host : fs-mbz-gpu-881 + rank : 18 (local_rank: 2) + exitcode : 1 (pid: 3365851) + error_file: + traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html +============================================================ + return arg(*args, **kwargs) + ^^^^^^^^^^^^^^^^^^^^ + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main + launch(args) + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch + run(args) + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run + elastic_launch( + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__ + return launch_agent(self._config, self._entrypoint, list(args)) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent + raise ChildFailedError( +torch.distributed.elastic.multiprocessing.errors.ChildFailedError: +============================================================ +./pretrain_gpt_profile.py FAILED +------------------------------------------------------------ +Failures: + +------------------------------------------------------------ +Root Cause (first observed failure): +[0]: + time : 2025-06-21_21:18:13 + host : fs-mbz-gpu-870 + rank : 11 (local_rank: 3) + exitcode : 1 (pid: 3434612) + error_file: + traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html +============================================================ ++ set +x +E0621 21:18:14.655000 2065087 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 1 (pid: 2065157) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3 +W0621 
21:18:14.664000 2065087 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-901_2065087_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError. +[W621 21:18:14.103771668 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-901]:57862, remote=[fs-mbz-gpu-852]:29500): Broken pipe +Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x1458675785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x14585045aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa358 (0x14585045c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #3: + 0x5babb3e (0x14585045db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: c10d::TCPStore::doWait(c10::ArrayRef, std::allocator > >, std::chrono::duration >) + 0x1a6 (0x145850457ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string, std::allocator > const&) + 0x33 (0x145850457ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #6: c10d::TCPStore::get(std::__cxx11::basic_string, std::allocator > const&) + 0xab (0x145850458f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #7: + 0xc0f526 (0x14585f78b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #8: + 0x37f17d (0x14585eefb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #26: + 0x29d90 (0x1458685f0d90 in /lib/x86_64-linux-gnu/libc.so.6) +frame #27: __libc_start_main + 0x80 (0x1458685f0e40 in /lib/x86_64-linux-gnu/libc.so.6) + +W0621 21:18:14.676000 2065087 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-901_2065087_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError. 
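The broken-pipe traces in this section all target the c10d store at fs-mbz-gpu-852:29500, which disappears once the first worker on the master node dies. One way to tell "store process exited" apart from a genuine network problem is to probe the endpoint directly from a worker node; a hedged sketch (the key name "probe" is arbitrary):

from datetime import timedelta
import torch.distributed as dist

# Client-side handle on the rendezvous TCPStore; constructing or using it
# raises (timeout / connection error) if nothing is listening any more.
store = dist.TCPStore("fs-mbz-gpu-852", 29500, is_master=False,
                      timeout=timedelta(seconds=10))
store.set("probe", "ok")
print(store.get("probe"))  # returns bytes while the store is healthy

In this log the probe would fail, which is consistent with the agents merely racing to shut down a rendezvous whose host process had already exited, rather than with a network fault.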
+[W621 21:18:14.115347909 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-901]:57862, remote=[fs-mbz-gpu-852]:29500): Broken pipe +Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x1458675785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x14585045aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa358 (0x14585045c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #3: + 0x5babb3e (0x14585045db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: c10d::TCPStore::doWait(c10::ArrayRef, std::allocator > >, std::chrono::duration >) + 0x1a6 (0x145850457ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string, std::allocator > const&) + 0x33 (0x145850457ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #6: c10d::TCPStore::get(std::__cxx11::basic_string, std::allocator > const&) + 0xab (0x145850458f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #7: + 0xc0f526 (0x14585f78b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #8: + 0x37f17d (0x14585eefb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #26: + 0x29d90 (0x1458685f0d90 in /lib/x86_64-linux-gnu/libc.so.6) +frame #27: __libc_start_main + 0x80 (0x1458685f0e40 in /lib/x86_64-linux-gnu/libc.so.6) + +W0621 21:18:14.686000 2065087 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-901_2065087_0' has failed to shutdown the rendezvous '343200' due to an error of type RendezvousConnectionError. 
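The launcher frames repeated in these tracebacks (launcher/api.py:139 __call__ -> line 270 launch_agent) correspond to a small surface that can also be driven programmatically. A sketch under the same rendezvous parameters the launch commands later in this log use (c10d backend, endpoint fs-mbz-gpu-852:29500, run id 343200); trainer() is hypothetical, and note this API lives under torch.distributed.launcher, whose signature can shift between releases:

from torch.distributed.launcher.api import LaunchConfig, elastic_launch

def trainer():
    pass  # hypothetical per-process entrypoint

config = LaunchConfig(
    min_nodes=4,           # matches --nnodes 4
    max_nodes=4,
    nproc_per_node=8,      # matches --nproc_per_node 8
    rdzv_backend="c10d",
    rdzv_endpoint="fs-mbz-gpu-852:29500",
    run_id="343200",
)
# Raises ChildFailedError on worker failure, as seen throughout this log.
elastic_launch(config, trainer)()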
+Traceback (most recent call last): + File "", line 198, in _run_module_as_main + File "", line 88, in _run_code + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in + main() + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper + return arg(*args, **kwargs) + ^^^^^^^^^^^^^^^^^^^^ + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main + launch(args) + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch + run(args) + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run + elastic_launch( + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__ + return launch_agent(self._config, self._entrypoint, list(args)) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent + raise ChildFailedError( +torch.distributed.elastic.multiprocessing.errors.ChildFailedError: +============================================================ +./pretrain_gpt_profile.py FAILED +------------------------------------------------------------ +Failures: + +------------------------------------------------------------ +Root Cause (first observed failure): +[0]: + time : 2025-06-21_21:18:13 + host : fs-mbz-gpu-901 + rank : 25 (local_rank: 1) + exitcode : 1 (pid: 2065157) + error_file: + traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html +============================================================ ++ set +x ++ set +x ++ set +x ++ for ctx_length in 1024 2048 4096 8192 12288 16384 24576 32768 40960 49152 65536 81920 98304 131072 ++ export PROF_CTX_LENGTH=131072 ++ PROF_CTX_LENGTH=131072 ++ name='/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L131072*tp8.cp4.bs32.json' ++ '[' -f '/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L131072*tp8.cp4.bs32.json' ']' ++ echo 'Running ctx_length=131072, TP_SIZE=8, CP_SIZE=4, BATCH_SIZE=32' ++ srun bash ./attnserver.sh ++ which python3 ++ which python3 ++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 1 --rdzv_id 343200 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 131072 --max-position-embeddings 131072 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/ ++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 0 --rdzv_id 343200 --rdzv_backend c10d --rdzv_endpoint 
fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 131072 --max-position-embeddings 131072 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/ ++ which python3 ++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 2 --rdzv_id 343200 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 131072 --max-position-embeddings 131072 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/ ++ which python3 ++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 3 --rdzv_id 343200 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 8 --context-parallel-size 4 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 131072 --max-position-embeddings 131072 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/ +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated +and will be removed in future. Use torchrun. +Note that --use-env is set by default in torchrun. +If your script expects `--local-rank` argument to be set, please +change it to read from `os.environ['LOCAL_RANK']` instead. See +https://pytorch.org/docs/stable/distributed.html#launch-utility for +further instructions + + main() +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated +and will be removed in future. Use torchrun. +Note that --use-env is set by default in torchrun. +If your script expects `--local-rank` argument to be set, please +change it to read from `os.environ['LOCAL_RANK']` instead. 
See +https://pytorch.org/docs/stable/distributed.html#launch-utility for +further instructions + + main() +W0621 21:18:18.894000 2066939 site-packages/torch/distributed/run.py:766] +W0621 21:18:18.894000 2066939 site-packages/torch/distributed/run.py:766] ***************************************** +W0621 21:18:18.894000 2066939 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed. +W0621 21:18:18.894000 2066939 site-packages/torch/distributed/run.py:766] ***************************************** +W0621 21:18:18.897000 147855 site-packages/torch/distributed/run.py:766] +W0621 21:18:18.897000 147855 site-packages/torch/distributed/run.py:766] ***************************************** +W0621 21:18:18.897000 147855 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed. +W0621 21:18:18.897000 147855 site-packages/torch/distributed/run.py:766] ***************************************** +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated +and will be removed in future. Use torchrun. +Note that --use-env is set by default in torchrun. +If your script expects `--local-rank` argument to be set, please +change it to read from `os.environ['LOCAL_RANK']` instead. See +https://pytorch.org/docs/stable/distributed.html#launch-utility for +further instructions + + main() +W0621 21:18:18.972000 3436378 site-packages/torch/distributed/run.py:766] +W0621 21:18:18.972000 3436378 site-packages/torch/distributed/run.py:766] ***************************************** +W0621 21:18:18.972000 3436378 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed. +W0621 21:18:18.972000 3436378 site-packages/torch/distributed/run.py:766] ***************************************** +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated +and will be removed in future. Use torchrun. +Note that --use-env is set by default in torchrun. +If your script expects `--local-rank` argument to be set, please +change it to read from `os.environ['LOCAL_RANK']` instead. See +https://pytorch.org/docs/stable/distributed.html#launch-utility for +further instructions + + main() +W0621 21:18:18.988000 3367613 site-packages/torch/distributed/run.py:766] +W0621 21:18:18.988000 3367613 site-packages/torch/distributed/run.py:766] ***************************************** +W0621 21:18:18.988000 3367613 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed. +W0621 21:18:18.988000 3367613 site-packages/torch/distributed/run.py:766] *****************************************
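The FutureWarning repeated above spells out the migration: torchrun no longer injects --local-rank, so scripts should read the rank from the environment. A minimal sketch of that change:

import os
import torch

local_rank = int(os.environ["LOCAL_RANK"])  # exported by torchrun / the elastic launcher
torch.cuda.set_device(local_rank)

The launcher also exports RANK and WORLD_SIZE; once the script reads these, the deprecated python3 -m torch.distributed.launch invocations in this job can be swapped for torchrun with otherwise identical arguments.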