codymlewis committed · Commit 18bcc26 · 1 Parent(s): 69c604e

Create nbaiot.py

Files changed (1):
  nbaiot.py  +137 -0
nbaiot.py ADDED
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""N-BaIoT dataset loader."""

import rarfile
import numpy as np
import pandas as pd
import datasets

_CITATION = """\
@article{DBLP:journals/corr/abs-1805-03409,
  author     = {Yair Meidan and
                Michael Bohadana and
                Yael Mathov and
                Yisroel Mirsky and
                Dominik Breitenbacher and
                Asaf Shabtai and
                Yuval Elovici},
  title      = {N-BaIoT: Network-based Detection of IoT Botnet Attacks Using Deep
                Autoencoders},
  journal    = {CoRR},
  volume     = {abs/1805.03409},
  year       = {2018},
  url        = {http://arxiv.org/abs/1805.03409},
  eprinttype = {arXiv},
  eprint     = {1805.03409},
  timestamp  = {Mon, 13 Aug 2018 16:49:04 +0200},
  biburl     = {https://dblp.org/rec/journals/corr/abs-1805-03409.bib},
  bibsource  = {dblp computer science bibliography, https://dblp.org}
}
"""
_DESCRIPTION = """\
An intrusion detection dataset that focuses on IoT botnet attacks.
"""
_HOMEPAGE = "https://archive.ics.uci.edu/dataset/442/detection+of+iot+botnet+attacks+n+baiot"
_LICENSE = "Creative Commons Attribution 4.0 International (CC BY 4.0) license"
_URL = "https://archive.ics.uci.edu/static/public/442/detection+of+iot+botnet+attacks+n+baiot.zip"
_ATTACK_NAMES = [
    "benign_traffic", "combo", "junk", "mirai-ack", "mirai-scan", "mirai-syn", "mirai-udp",
    "mirai-udpplain", "scan", "tcp", "udp",
]
_DEVICE_NAMES = [
    "Danmini_Doorbell", "Ecobee_Thermostat", "Ennio_Doorbell", "Philips_B120N10_Baby_Monitor",
    "Provision_PT_737E_Security_Camera", "Provision_PT_838_Security_Camera", "Samsung_SNH_1011_N_Webcam",
    "SimpleHome_XCS7_1002_WHT_Security_Camera", "SimpleHome_XCS7_1003_WHT_Security_Camera",
]


class NBAIOTDataset(datasets.GeneratorBasedBuilder):
    """N-BaIoT intrusion detection."""

    VERSION = datasets.Version("1.1.0")

    def _info(self):
        return datasets.DatasetInfo(
            # This is the description that will appear on the datasets page.
            description=_DESCRIPTION,
            # This defines the different columns of the dataset and their types
            features=datasets.Features({
                "features": datasets.Sequence(feature=datasets.Value("float32"), length=115),
                "attack": datasets.ClassLabel(len(_ATTACK_NAMES), names=_ATTACK_NAMES),
                "device": datasets.ClassLabel(len(_DEVICE_NAMES), names=_DEVICE_NAMES),
            }),
            # Homepage of the dataset for documentation
            homepage=_HOMEPAGE,
            # License for the dataset if available
            license=_LICENSE,
            # Citation for the dataset
            citation=_CITATION,
        )
    def _split_generators(self, dl_manager):
        data_dir = dl_manager.download_and_extract(_URL)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                # These kwargs will be passed to _generate_examples
                gen_kwargs={
                    "filepath": data_dir,
                    "split": "train",
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                # These kwargs will be passed to _generate_examples
                gen_kwargs={
                    "filepath": data_dir,
                    "split": "test",
                },
            ),
        ]
    # method parameters are unpacked from `gen_kwargs` as given in `_split_generators`
    def _generate_examples(self, filepath, split):
        key = 0  # running counter so example keys stay unique across devices and files
        for device in _DEVICE_NAMES:
            # First load in the benign traffic
            all_data = pd.read_csv(f"{filepath}/{device}/benign_traffic.csv")
            all_data["attack"] = "benign_traffic"
            # Then the standard (gafgyt) attacks
            attacks_rar = rarfile.RarFile(f"{filepath}/{device}/gafgyt_attacks.rar")
            for fileinfo in attacks_rar.infolist():
                with attacks_rar.open(fileinfo.filename) as f:
                    df = pd.read_csv(f)
                df["attack"] = fileinfo.filename.replace(".csv", "")
                all_data = pd.concat((all_data, df), ignore_index=True)
            # And, if present, the Mirai attacks
            if device not in ["Ennio_Doorbell", "Samsung_SNH_1011_N_Webcam"]:
                mirai_rar = rarfile.RarFile(f"{filepath}/{device}/mirai_attacks.rar")
                for fileinfo in mirai_rar.infolist():
                    with mirai_rar.open(fileinfo.filename) as f:
                        df = pd.read_csv(f)
                    df["attack"] = "mirai-" + fileinfo.filename.replace(".csv", "")
                    all_data = pd.concat((all_data, df), ignore_index=True)
            # Create a reproducible 85/15 train-test split with a fixed seed
            rng = np.random.default_rng(round(np.pi**(np.pi * 100)))
            train = rng.uniform(size=len(all_data)) < 0.85
            all_data = all_data[train if split == "train" else ~train]
            attacks = all_data["attack"].to_list()
            all_data = all_data.drop(columns="attack")
            # Finally yield the data
            for (_, row), attack in zip(all_data.iterrows(), attacks):
                yield key, {
                    "features": row.to_numpy(),
                    "attack": attack,
                    "device": device,
                }
                key += 1
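
For reference, a loader script like this is normally consumed through `datasets.load_dataset`. The sketch below is not part of the commit: the repository id `codymlewis/nbaiot` is an assumption inferred from the committer name (a local path to the folder containing nbaiot.py works the same way), `trust_remote_code=True` is only needed on newer `datasets` releases, and `rarfile` needs an external unrar backend (e.g. unrar or bsdtar) installed on the system.

# Usage sketch; the repository id is hypothetical.
from datasets import load_dataset

ds = load_dataset("codymlewis/nbaiot", trust_remote_code=True)  # drop trust_remote_code on older `datasets`

train = ds["train"]
example = train[0]
print(len(example["features"]))  # 115 traffic statistics per example
# ClassLabel columns are stored as integers; map them back to their names:
print(train.features["attack"].int2str(example["attack"]))
print(train.features["device"].int2str(example["device"]))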