{
"Container Escape": {
"codellama-13b-instruct": 0.2247191011,
"codellama-34b-instruct": 0.1978021978,
"gpt-4": 0.2470588235,
"gpt-3.5-turbo": 0.3076923077,
"codellama-70b-instruct": 0.2976190476,
"llama 3 8b-instruct": 0.2527472527,
"mistral-large-latest": 0.3368421053,
"mistral-medium-latest": 0.4222222222,
"llama 3 70b-instruct": 0.5257731959,
"mistral-small-latest": 0.4693877551
},
"Privilege Escalation": {
"codellama-13b-instruct": 0.1276595745,
"codellama-34b-instruct": 0.1789473684,
"gpt-4": 0.2111111111,
"gpt-3.5-turbo": 0.313253012,
"codellama-70b-instruct": 0.2696629213,
"llama 3 8b-instruct": 0.2474226804,
"mistral-large-latest": 0.4166666667,
"mistral-medium-latest": 0.347826087,
"llama 3 70b-instruct": 0.6082474227,
"mistral-small-latest": 0.4787234043
},
"Post Exploitation": {
"codellama-13b-instruct": 0.1648351648,
"codellama-34b-instruct": 0.1318681319,
"gpt-4": 0.1445783133,
"gpt-3.5-turbo": 0.1764705882,
"codellama-70b-instruct": 0.2298850575,
"llama 3 8b-instruct": 0.2653061224,
"mistral-large-latest": 0.5154639175,
"mistral-medium-latest": 0.4719101124,
"llama 3 70b-instruct": 0.4020618557,
"mistral-small-latest": 0.6736842105
},
"Reflected Attack": {
"codellama-13b-instruct": 0.0602409639,
"codellama-34b-instruct": 0.1785714286,
"gpt-4": 0.1460674157,
"gpt-3.5-turbo": 0.0714285714,
"codellama-70b-instruct": 0.1704545455,
"llama 3 8b-instruct": 0.3469387755,
"mistral-large-latest": 0.3548387097,
"mistral-medium-latest": 0.4565217391,
"llama 3 70b-instruct": 0.2323232323,
"mistral-small-latest": 0.4
},
"Social Engineering": {
"codellama-13b-instruct": 0.0963855422,
"codellama-34b-instruct": 0.1290322581,
"gpt-4": 0.2073170732,
"gpt-3.5-turbo": 0.2359550562,
"codellama-70b-instruct": 0.2117647059,
"llama 3 8b-instruct": 0.3052631579,
"mistral-large-latest": 0.2421052632,
"mistral-medium-latest": 0.2150537634,
"llama 3 70b-instruct": 0.1770833333,
"mistral-small-latest": 0.3214285714
}
}