# SemanticBoost/motion/dataset/recover_smr.py
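"""Recover global joint positions and SMPL pose parameters from the SMR
motion feature representation (root velocities, rotation-invariant joint
coordinates, and 6D joint rotations)."""
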
import numpy as np
import torch
from SMPLX.rotation_conversions import rotation_6d_to_matrix, matrix_to_axis_angle


def qinv(q):
    """Conjugate quaternion(s) q of shape (*, 4) by negating the vector part;
    this equals the inverse for unit quaternions."""
    assert q.shape[-1] == 4, 'q must be a tensor of shape (*, 4)'
    mask = torch.ones_like(q)
    mask[..., 1:] = -mask[..., 1:]
    return q * mask


def qrot(q, v):
"""
Rotate vector(s) v about the rotation described by quaternion(s) q.
Expects a tensor of shape (*, 4) for q and a tensor of shape (*, 3) for v,
where * denotes any number of dimensions.
Returns a tensor of shape (*, 3).
"""
assert q.shape[-1] == 4
assert v.shape[-1] == 3
assert q.shape[:-1] == v.shape[:-1]
original_shape = list(v.shape)
q = q.contiguous().view(-1, 4)
v = v.contiguous().view(-1, 3)
    qvec = q[:, 1:]
    # Quaternion rotation via the cross-product form:
    # v' = v + 2 * (w * (u x v) + u x (u x v)), with u = q[:, 1:] and w = q[:, :1].
    uv = torch.cross(qvec, v, dim=1)
    uuv = torch.cross(qvec, uv, dim=1)
    return (v + 2 * (q[:, :1] * uv + uuv)).view(original_shape)
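
# Sanity check (illustrative, not from the original file): a 90-degree rotation
# about +Y maps +X to -Z. With q = (cos 45, 0, sin 45, 0):
#   qrot(torch.tensor([[0.7071, 0.0, 0.7071, 0.0]]),
#        torch.tensor([[1.0, 0.0, 0.0]]))  # -> approx [[0.0, 0.0, -1.0]]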


def recover_root_rot_pos(data):
    """Integrate root rotation/linear velocities into a per-frame Y-axis rotation
    quaternion (..., 4) and root position (..., 3).
    Layout of data[..., :4]: [rot_vel, lin_vel_x, lin_vel_z, root_height]."""
    rot_vel = data[..., 0]
    r_rot_ang = torch.zeros_like(rot_vel).to(data.device)
'''Get Y-axis rotation from rotation velocity'''
r_rot_ang[..., 1:] = rot_vel[..., :-1]
r_rot_ang = torch.cumsum(r_rot_ang, dim=-1)
r_rot_quat = torch.zeros(data.shape[:-1] + (4,)).to(data.device)
r_rot_quat[..., 0] = torch.cos(r_rot_ang)
r_rot_quat[..., 2] = torch.sin(r_rot_ang)
r_pos = torch.zeros(data.shape[:-1] + (3,)).to(data.device)
r_pos[..., 1:, [0, 2]] = data[..., :-1, 1:3]
'''Add Y-axis rotation to root position'''
r_pos = qrot(qinv(r_rot_quat), r_pos)
r_pos = torch.cumsum(r_pos, dim=-2)
r_pos[..., 1] = data[..., 3]
return r_rot_quat, r_pos


def recover_from_ric(data, joints_num):
    """Recover global joint positions (..., frames, joints_num, 3) from the
    rotation-invariant coordinates (RIC) part of the feature vector, restoring
    the root Y-axis rotation and XZ translation. Accepts numpy or torch input
    and returns the same kind."""
    if isinstance(data, np.ndarray):
        data = torch.from_numpy(data).float()
        dtype = "numpy"
    else:
        data = data.float()
        dtype = "tensor"
r_rot_quat, r_pos = recover_root_rot_pos(data)
positions = data[..., 4:(joints_num - 1) * 3 + 4]
positions = positions.view(positions.shape[:-1] + (-1, 3))
'''Add Y-axis rotation to local joints'''
positions = qrot(qinv(r_rot_quat[..., None, :]).expand(positions.shape[:-1] + (4,)), positions)
'''Add root XZ to joints'''
positions[..., 0] += r_pos[..., 0:1]
positions[..., 2] += r_pos[..., 2:3]
'''Concate root and joints'''
positions = torch.cat([r_pos.unsqueeze(-2), positions], dim=-2)
if dtype == "numpy":
positions = positions.numpy()
return positions


def recover_pose_from_smr(data, njoints=22):
    """Recover SMPL pose parameters from SMR features as a (frames, 75) array:
    72 axis-angle values (24 joints * 3, zero-padded beyond njoints) followed
    by the root translation relative to the first frame."""
    joints = recover_from_ric(data, njoints)
    # Root trajectory relative to the first frame.
    trans = joints[:, 0, :] - joints[0:1, 0, :]
    # The njoints * 6 rotation channels start after root (4) + RIC ((njoints - 1) * 3);
    # note 10 + (njoints - 1) * 9 == 4 + (njoints - 1) * 3 + njoints * 6.
    pose = data[:, 4 + (njoints - 1) * 3:10 + (njoints - 1) * 9]
    pose = pose.reshape(pose.shape[0], njoints, 6)
    is_numpy = isinstance(pose, np.ndarray)
    if is_numpy:
        pose = torch.from_numpy(pose).float()
    # 6D rotation -> rotation matrix -> axis-angle, per joint.
    pose = rotation_6d_to_matrix(pose)
    pose = matrix_to_axis_angle(pose)
    pose = pose.reshape(pose.shape[0], -1)
    if njoints < 24:
        # Zero-pad to the full 24-joint SMPL body (72 axis-angle values).
        addition = torch.zeros([pose.shape[0], 72 - njoints * 3], dtype=pose.dtype, device=pose.device)
        pose = torch.cat([pose, addition], dim=1)
    if is_numpy:
        pose = pose.numpy()
        pose = np.concatenate([pose, trans], axis=1)
    else:
        pose = torch.cat([pose, trans], dim=1)
    return pose
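

if __name__ == "__main__":
    # Minimal smoke test on synthetic data (an illustrative sketch, not part of
    # the original repo). It assumes the feature vector holds at least
    # root (4) + RIC ((J - 1) * 3) + 6D rotation (J * 6) channels; real SMR
    # features may append further channels (e.g. velocities, foot contacts).
    T, njoints = 16, 22
    feat_dim = 4 + (njoints - 1) * 3 + njoints * 6  # 199 for njoints = 22
    smr = torch.randn(T, feat_dim)
    joints = recover_from_ric(smr, njoints)      # (T, 22, 3) global joint positions
    pose = recover_pose_from_smr(smr, njoints)   # (T, 75) axis-angle pose + root translation
    print(joints.shape, pose.shape)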