import os

import h5py
import numpy as np
import torch


class FsiDataReader:
    def __init__(self,
                 location,
                 mu=None,
                 in_lets_x1=None,
                 in_lets_x2=None):
        '''
        Dataset of fluid-structure interaction simulations.

        At each time step t, the simulation records 5 variables:
            velocity_t = [vx_t, vy_t]: velocity in the x and y directions,
            P_t: pressure,
            displacement_t = [dx_t, dy_t]: displacement in the x and y directions.

        The initial mesh is loaded as self.input_mesh, a 2D mesh with 2 columns:
        the first column is the x coordinate and the second is the y coordinate.
        The mesh is time dependent, i.e., it changes with time; the mesh at
        time t is given by mesh_t = self.input_mesh + displacement_t.

        Parameters
        ----------
        location : str
            Path to the directory containing the data.
        mu : list, optional
            List of mu values; the simulations corresponding to these values
            are loaded. Each value must be one of ['0.5', '5', '1.0', '10.0']
            and must exactly match the string given here. mu='0.5' must be
            loaded on its own, never mixed with other mu values. Default is
            None, which loads all values.
        in_lets_x1 : list, optional
            List of x1 parameters controlling the inlet boundary condition.
            Each value must be one of ['-4.0', '-2.0', '0.0', '2.0', '4.0',
            '6.0'] and must exactly match the string given here. Default is
            None, which loads all values.
        in_lets_x2 : list, optional
            List of x2 parameters controlling the inlet boundary condition.
            Each value must be one of ['-4.0', '-2.0', '0', '2.0', '4.0',
            '6.0'] (note the zero is '0', not '0.0') and must exactly match
            the string given here. Default is None, which loads all values.
        '''
        self.location = location
        self._x1 = ['-4.0', '-2.0', '0.0', '2.0', '4.0', '6.0']
        self._x2 = ['-4.0', '-2.0', '0', '2.0', '4.0', '6.0']
        self._mu = ['0.5', '5', '1.0', '10.0']
        # keep vx, vy, P, dx, dy from the concatenated fields
        self.variable_indices = [0, 1, 3, 4, 5]
        if mu is not None:
            # restrict to the requested mu values; all must be valid
            assert set(mu).issubset(set(self._mu)), \
                f"mu values must be a subset of {self._mu}"
            self._mu = mu
        if in_lets_x1 is not None:
            # restrict to the requested x1 values; all must be valid
            assert set(in_lets_x1).issubset(set(self._x1)), \
                f"x1 values must be a subset of {self._x1}"
            self._x1 = in_lets_x1
        if in_lets_x2 is not None:
            # restrict to the requested x2 values; all must be valid
            assert set(in_lets_x2).issubset(set(self._x2)), \
                f"x2 values must be a subset of {self._x2}"
            self._x2 = in_lets_x2
        # mu=0.5 is stored in a different on-disk format, so it must not be
        # mixed with other mu values in a single reader
        assert not ('0.5' in self._mu and len(self._mu) > 1), \
            "mu=0.5 should not be mixed with other mu values"
        self.load_mesh(location)
    def load_mesh(self, location):
        if '0.5' in self._mu:
            # mu=0.5 stores the mesh as plain-text coordinate files
            x_path = os.path.join(location, 'mu=0.5', 'coord_x.txt')
            y_path = os.path.join(location, 'mu=0.5', 'coord_y.txt')
            mesh_x = np.loadtxt(x_path)
            mesh_y = np.loadtxt(y_path)
            # stack the coordinates into an (n_nodes, 2) mesh
            mesh = np.stack([mesh_x, mesh_y], axis=1)
            self.input_mesh = torch.from_numpy(mesh).type(torch.float)
        else:
            # the remaining mu values share a single HDF5 mesh file
            with h5py.File(os.path.join(location, 'mesh.h5'), 'r') as mesh_h:
                mesh = mesh_h['mesh/coordinates'][:]
            self.input_mesh = torch.from_numpy(mesh).type(torch.float)
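
    # Sketch (not part of the original code path): given a combined field
    # tensor from get_data / get_data_txt, the deformed mesh at step t can be
    # recovered as
    #   mesh_t = reader.input_mesh + fields[t, :, 3:5]
    # since columns 3 and 4 of the combined tensor hold (dx_t, dy_t).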
    def _readh5(self, h5f, dtype=torch.float32):
        # snapshots are stored under 'VisualisationVector', keyed by the
        # integer time-step index
        a_dset_keys = list(h5f['VisualisationVector'].keys())
        size = len(a_dset_keys)
        readings = [None] * size
        for dset in a_dset_keys:
            ds_data = h5f['VisualisationVector'][dset]
            # place each snapshot at its time-step index
            readings[int(dset)] = torch.tensor(np.array(ds_data), dtype=dtype)
        readings_tensor = torch.stack(readings, dim=0)
        print(f"Loaded tensor size: {readings_tensor.shape}")
        return readings_tensor
    def get_data(self, mu, x1, x2):
        if mu not in self._mu:
            raise ValueError(f"Value of mu must be one of {self._mu}")
        if x1 not in self._x1 or x2 not in self._x2:
            raise ValueError(
                f"Value of x1 must be one of {self._x1} "
                f"and value of x2 must be one of {self._x2}")
        path = os.path.join(
            self.location,
            'mu=' + str(mu),
            'x1=' + str(x1),
            'x2=' + str(x2),
            'Visualization')
        with h5py.File(os.path.join(path, 'displacement.h5'), 'r') as h5f:
            displacements_tensor = self._readh5(h5f)
        with h5py.File(os.path.join(path, 'pressure.h5'), 'r') as h5f:
            pressure_tensor = self._readh5(h5f)
        with h5py.File(os.path.join(path, 'velocity.h5'), 'r') as h5f:
            velocity_tensor = self._readh5(h5f)
        # concatenate the fields and keep only [vx, vy, P, dx, dy]
        combined = torch.cat(
            [velocity_tensor, pressure_tensor, displacements_tensor],
            dim=-1)[..., self.variable_indices]
        return combined
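
    # Example (hypothetical path): for a simulation with T recorded steps and
    # N mesh nodes, get_data returns a (T, N, 5) tensor ordered
    # [vx, vy, P, dx, dy], e.g.
    #   reader = FsiDataReader('/path/to/FSI-pde-dataset', mu=['1.0'])
    #   fields = reader.get_data('1.0', '0.0', '2.0')  # -> torch.Size([T, N, 5])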
    def get_data_txt(self, mu, x1, x2):
        if mu not in self._mu:
            raise ValueError(f"Value of mu must be one of {self._mu}")
        if x1 not in self._x1 or x2 not in self._x2:
            raise ValueError(
                f"Value of x1 must be one of {self._x1} "
                f"and value of x2 must be one of {self._x2}")
        path = os.path.join(
            self.location,
            'mu=' + str(mu),
            'x1=' + str(x1),
            'x2=' + str(x2),
            '1')
        dis_x = torch.tensor(np.loadtxt(os.path.join(path, 'dis_x.txt')))
        dis_y = torch.tensor(np.loadtxt(os.path.join(path, 'dis_y.txt')))
        pressure = torch.tensor(np.loadtxt(os.path.join(path, 'pres.txt')))
        velocity_x = torch.tensor(np.loadtxt(os.path.join(path, 'vel_x.txt')))
        velocity_y = torch.tensor(np.loadtxt(os.path.join(path, 'vel_y.txt')))
        # reshape each flat dump into (n_steps, 876, 1): 876 mesh nodes per row
        dis_x = dis_x.view(-1, 876, 1)
        dis_y = dis_y.view(-1, 876, 1)
        pressure = pressure.view(-1, 876, 1)
        velocity_x = velocity_x.view(-1, 876, 1)
        velocity_y = velocity_y.view(-1, 876, 1)
        combined = torch.cat(
            [velocity_x, velocity_y, pressure, dis_x, dis_y], dim=-1)
        return combined
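
    # Example (sketch, hypothetical path): the mu=0.5 runs are plain-text
    # dumps with 876 mesh nodes per snapshot, so get_data_txt returns a
    # (T, 876, 5) tensor in the same [vx, vy, P, dx, dy] ordering as get_data:
    #   reader = FsiDataReader('/path/to/FSI-pde-dataset', mu=['0.5'])
    #   fields = reader.get_data_txt('0.5', '0.0', '0')  # x2 zero is '0'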
    def get_loader(self, batch_size, shuffle=True):
        data_t0 = []
        data_t1 = []
        for mu in self._mu:
            for x1 in self._x1:
                for x2 in self._x2:
                    try:
                        if mu == '0.5':
                            mu_data = self.get_data_txt(mu, x1, x2)
                        else:
                            mu_data = self.get_data(mu, x1, x2)
                        # pair each time step with its successor
                        mu_data_t0 = mu_data[:-1, :, :]
                        mu_data_t1 = mu_data[1:, :, :]
                        data_t0.append(mu_data_t0)
                        data_t1.append(mu_data_t1)
                    except FileNotFoundError:
                        print(f"File not found for mu={mu}, x1={x1}, x2={x2}")
                        continue
        data_t0 = torch.cat(data_t0, dim=0)
        data_t1 = torch.cat(data_t1, dim=0)
        tensor_dataset = torch.utils.data.TensorDataset(data_t0, data_t1)
        data_loader = torch.utils.data.DataLoader(
            tensor_dataset, batch_size=batch_size, shuffle=shuffle)
        return data_loader
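

# Minimal usage sketch. The data directory below is a placeholder; point it
# at a local copy of the dataset laid out as mu=<mu>/x1=<x1>/x2=<x2>/.
if __name__ == "__main__":
    reader = FsiDataReader('/path/to/FSI-pde-dataset', mu=['1.0'])
    loader = reader.get_loader(batch_size=16)
    for state_t0, state_t1 in loader:
        # consecutive time steps, each of shape (batch, n_nodes, 5)
        print(state_t0.shape, state_t1.shape)
        break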