| 0 (string, 12 classes) | 1 (float64, 0–55.9k) |
|---|---|
| megatron.core.transformer.attention.forward.qkv | 1.915392 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.003008 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.00288 |
| megatron.core.transformer.attention.forward.core_attention | 73.715614 |
| megatron.core.transformer.attention.forward.linear_proj | 4.851808 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 80.506752 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 1.783136 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 3.6288 |
| megatron.core.transformer.mlp.forward.activation | 0.338336 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 6.941472 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 10.920256 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 1.782464 |
| megatron.core.transformer.attention.forward.qkv | 0.071968 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.002976 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.003008 |
| megatron.core.transformer.attention.forward.core_attention | 19.769024 |
| megatron.core.transformer.attention.forward.linear_proj | 1.835296 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 21.700544 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 0.065792 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 0.116512 |
| megatron.core.transformer.mlp.forward.activation | 0.016992 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 0.2896 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 0.434944 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.064352 |
| megatron.core.transformer.attention.forward.qkv | 0.071392 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.0032 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.002944 |
| megatron.core.transformer.attention.forward.core_attention | 2.59568 |
| megatron.core.transformer.attention.forward.linear_proj | 2.012576 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 4.704032 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 0.064992 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 0.12112 |
| megatron.core.transformer.mlp.forward.activation | 0.01712 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 0.29248 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 0.442816 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.065344 |
| megatron.core.transformer.attention.forward.qkv | 0.240512 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.003008 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.003104 |
| megatron.core.transformer.attention.forward.core_attention | 5.245376 |
| megatron.core.transformer.attention.forward.linear_proj | 0.735968 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 6.245472 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 0.233536 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 0.43008 |
| megatron.core.transformer.mlp.forward.activation | 0.047936 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 0.942592 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 1.43264 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.232384 |
| megatron.core.transformer.attention.forward.qkv | 0.244224 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.00304 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.003008 |
| megatron.core.transformer.attention.forward.core_attention | 5.242432 |
| megatron.core.transformer.attention.forward.linear_proj | 0.718816 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 6.229312 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 0.232256 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 0.433472 |
| megatron.core.transformer.mlp.forward.activation | 0.048544 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 0.9584 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 1.452256 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.232928 |
| megatron.core.transformer.attention.forward.qkv | 0.917152 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.003008 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.00304 |
| megatron.core.transformer.attention.forward.core_attention | 39.512959 |
| megatron.core.transformer.attention.forward.linear_proj | 2.53232 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 42.986912 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 0.891776 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 1.78736 |
| megatron.core.transformer.mlp.forward.activation | 0.17088 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 3.590368 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 5.5608 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.89104 |
| megatron.core.transformer.attention.forward.qkv | 0.92912 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.003072 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.003072 |
| megatron.core.transformer.attention.forward.core_attention | 39.479809 |
| megatron.core.transformer.attention.forward.linear_proj | 2.507104 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 42.94083 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 0.891776 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 1.7848 |
| megatron.core.transformer.mlp.forward.activation | 0.17088 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 3.60352 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 5.57152 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 0.891872 |
| megatron.core.transformer.attention.forward.qkv | 3.768448 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.00304 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.003008 |
| megatron.core.transformer.attention.forward.core_attention | 309.965668 |
| megatron.core.transformer.attention.forward.linear_proj | 10.99632 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attention | 324.755524 |
| megatron.core.transformer.transformer_layer._forward_attention.self_attn_bda | 3.530656 |
| megatron.core.transformer.mlp.forward.linear_fc1 | 6.573952 |
| megatron.core.transformer.mlp.forward.activation | 0.665152 |
| megatron.core.transformer.mlp.forward.linear_fc2 | 14.106176 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp | 21.357632 |
| megatron.core.transformer.transformer_layer._forward_mlp.mlp_bda | 3.532384 |
| megatron.core.transformer.attention.forward.qkv | 3.783552 |
| megatron.core.transformer.attention.forward.adjust_key_value | 0.003072 |
| megatron.core.transformer.attention.forward.rotary_pos_emb | 0.003072 |
| megatron.core.transformer.attention.forward.core_attention | 309.687256 |
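A minimal sketch of how rows like these could be aggregated per operation, assuming the table is exported as a headerless two-column CSV (column 0 = operation name, column 1 = measured value); the file name `profile.csv` and the column labels `operation`/`value` are hypothetical, not part of the dataset itself:

```python
import pandas as pd

# Hypothetical export of the table above: two unnamed columns,
# the first holding the Megatron-Core operation name, the second the measured value.
df = pd.read_csv("profile.csv", header=None, names=["operation", "value"])

# Summarize the repeated measurements for each of the 12 operations.
summary = (
    df.groupby("operation")["value"]
      .agg(["count", "mean", "min", "max"])
      .sort_values("mean", ascending=False)
)
print(summary)
```

Sorting by the mean puts the heaviest operations (e.g. `core_attention` and `self_attention`) at the top, which is usually the first thing to check in a breakdown like this.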