ashiq24 commited on
Commit
045f29c
·
1 Parent(s): b51225d

creating loader

Browse files
Files changed (3) hide show
  1. .gitignore +5 -1
  2. data_vis.ipynb +8 -2
  3. fsi_reader.py +32 -9
.gitignore CHANGED
@@ -2,7 +2,11 @@
2
  __pycache__/
3
  *.py[cod]
4
  *$py.class
5
-
 
 
 
 
6
  # C extensions
7
  *.so
8
 
 
2
  __pycache__/
3
  *.py[cod]
4
  *$py.class
5
+ fsi-data/*
6
+ cfd-data/*
7
+ *.txt
8
+ *.h5
9
+ *.xdmf
10
  # C extensions
11
  *.so
12
 
data_vis.ipynb CHANGED
@@ -14,6 +14,13 @@
14
  "from scipy.interpolate import griddata"
15
  ]
16
  },
 
 
 
 
 
 
 
17
  {
18
  "cell_type": "code",
19
  "execution_count": null,
@@ -208,7 +215,7 @@
208
  },
209
  {
210
  "cell_type": "code",
211
- "execution_count": 18,
212
  "metadata": {},
213
  "outputs": [
214
  {
@@ -425,7 +432,6 @@
425
  "for idx, i in enumerate(data_loader):\n",
426
  " if idx%10 !=0:\n",
427
  " continue\n",
428
- " print(i.shape)\n",
429
  " # single_plot(i[:,:,0].numpy(), mesh.numpy())\n",
430
  " updated_mesh = mesh + i[0,:,-2:]\n",
431
  " data_list.append(i[:,:,3].numpy())\n",
 
14
  "from scipy.interpolate import griddata"
15
  ]
16
  },
17
+ {
18
+ "cell_type": "markdown",
19
+ "metadata": {},
20
+ "source": [
21
+ "Now we will explore how to load the dataset. Also We will visualize the dataset."
22
+ ]
23
+ },
24
  {
25
  "cell_type": "code",
26
  "execution_count": null,
 
215
  },
216
  {
217
  "cell_type": "code",
218
+ "execution_count": null,
219
  "metadata": {},
220
  "outputs": [
221
  {
 
432
  "for idx, i in enumerate(data_loader):\n",
433
  " if idx%10 !=0:\n",
434
  " continue\n",
 
435
  " # single_plot(i[:,:,0].numpy(), mesh.numpy())\n",
436
  " updated_mesh = mesh + i[0,:,-2:]\n",
437
  " data_list.append(i[:,:,3].numpy())\n",
fsi_reader.py CHANGED
@@ -30,10 +30,26 @@ class FsiDataReader():
30
  # check if in_lets_x2 is _x2 else raise error
31
  assert set(in_lets_x2).issubset(set(self._x2))
32
  self._x2 = in_lets_x2
 
 
 
 
33
 
34
- mesh_h = h5py.File(os.path.join(location, 'mesh.h5'), 'r')
35
- mesh = mesh_h['mesh/coordinates'][:]
36
- self.input_mesh = torch.from_numpy(mesh).type(torch.float)
 
 
 
 
 
 
 
 
 
 
 
 
37
 
38
  def _readh5(self, h5f, dtype=torch.float32):
39
  a_dset_keys = list(h5f['VisualisationVector'].keys())
@@ -115,23 +131,30 @@ class FsiDataReader():
115
  return combined
116
 
117
  def get_loader(self, batch_size, shuffle=True):
118
- data = []
 
119
  for mu in self._mu:
120
  for x1 in self._x1:
121
  for x2 in self._x2:
122
  try:
123
  if mu == 0.5:
124
- data.append(self.get_data_txt(mu, x1, x2))
125
  else:
126
- data.append(self.get_data(mu, x1, x2))
 
 
 
 
127
  except FileNotFoundError as e:
128
  print(
129
  f"file not found for mu={mu}, x1={x1}, x2={x2}")
130
  continue
131
- data = torch.cat(data, dim=0)
132
- print(f"Data shape: {data.shape}")
 
 
133
 
134
- data_loader = torch.utils.data.DataLoader(data, batch_size=batch_size, shuffle=shuffle)
135
 
136
  return data_loader
137
 
 
30
  # check if in_lets_x2 is _x2 else raise error
31
  assert set(in_lets_x2).issubset(set(self._x2))
32
  self._x2 = in_lets_x2
33
+
34
+ # mu = 0.5 uses a different data format, so it must not be mixed with other mu values
35
+ assert not('0.5' in self._mu and len(self._mu) > 1), "mu=0.5 should not be mixed with other mu values"
36
+
37
 
38
def load_mesh(self, location):
    """Load the simulation mesh from *location* and cache it on the reader.

    The mu=0.5 dataset stores the mesh as two plain-text coordinate files
    (``coord_x.txt`` / ``coord_y.txt``); all other datasets ship a single
    HDF5 mesh file (``mesh.h5``).

    Parameters
    ----------
    location : str
        Root directory of the dataset.

    Side effects
    ------------
    Sets ``self.input_mesh`` to a float32 tensor of shape (num_points, 2).
    """
    if '0.5' in self._mu:
        # mu=0.5 data keeps x/y coordinates in separate text files.
        mesh_x = np.loadtxt(os.path.join(location, 'mu=0.5', 'coord_x.txt'))
        mesh_y = np.loadtxt(os.path.join(location, 'mu=0.5', 'coord_y.txt'))
        # Stack the two 1-D coordinate arrays into an (N, 2) mesh.
        mesh = np.column_stack((mesh_x, mesh_y))
        self.input_mesh = torch.from_numpy(mesh).type(torch.float)
    else:
        # BUG FIX: the original left the HDF5 handle open (resource leak);
        # use a context manager so the file is closed after the read.
        with h5py.File(os.path.join(location, 'mesh.h5'), 'r') as mesh_h:
            mesh = mesh_h['mesh/coordinates'][:]
        self.input_mesh = torch.from_numpy(mesh).type(torch.float)
53
 
54
  def _readh5(self, h5f, dtype=torch.float32):
55
  a_dset_keys = list(h5f['VisualisationVector'].keys())
 
131
  return combined
132
 
133
def get_loader(self, batch_size, shuffle=True):
    """Build a DataLoader of (t0, t1) snapshot pairs over all (mu, x1, x2) configs.

    For each available configuration the first time step is used as the
    input (t0) and the remaining time step(s) as the target (t1).
    Configurations whose files are missing are skipped with a warning.

    Parameters
    ----------
    batch_size : int
        Batch size passed to the DataLoader.
    shuffle : bool, optional
        Whether to shuffle the dataset (default True).

    Returns
    -------
    torch.utils.data.DataLoader
        Yields (t0, t1) tensor batch pairs.
    """
    data_t0 = []
    data_t1 = []
    for mu in self._mu:
        for x1 in self._x1:
            for x2 in self._x2:
                try:
                    # mu may be stored as a float or as a string such as
                    # '0.5' (the __init__ assert checks '0.5' in self._mu);
                    # compare on the string form so both representations work.
                    if str(mu) == '0.5':
                        # BUG FIX: original had a stray closing parenthesis
                        # here (a syntax error).
                        mu_data = self.get_data_txt(mu, x1, x2)
                    else:
                        # BUG FIX: original assigned the result of
                        # data.append(...) — an undefined name, and append
                        # returns None anyway.
                        mu_data = self.get_data(mu, x1, x2)
                except FileNotFoundError:
                    print(
                        f"file not found for mu={mu}, x1={x1}, x2={x2}")
                    continue
                # Split into input (first time step) and target (the rest).
                data_t0.append(mu_data[:1, :, :])
                data_t1.append(mu_data[1:, :, :])
    data_t0 = torch.cat(data_t0, dim=0)
    data_t1 = torch.cat(data_t1, dim=0)

    tensor_dataset = torch.utils.data.TensorDataset(data_t0, data_t1)

    data_loader = torch.utils.data.DataLoader(tensor_dataset, batch_size=batch_size, shuffle=shuffle)

    return data_loader
160