From 75432a069503a2a51a79516f10ae8eb246e816d0 Mon Sep 17 00:00:00 2001
From: Christoph Lassner
Date: Sat, 10 Jul 2021 01:05:36 -0700
Subject: [PATCH] Add OpenCV camera conversion; fix bug in the unified
 PyTorch3D camera interface.

Summary:
This commit adds a new conversion function from OpenCV-style camera parameters
to Pulsar-style camera parameters to the library. Using this function, it fixes
a bug reported here:
https://fb.workplace.com/groups/629644647557365/posts/1079637302558095, by
replacing the original direct conversion with the PyTorch3D->OpenCV->Pulsar
conversion chain. Both conversions are well-tested, and an additional test for
the full chain has been added, resulting in a more reliable solution with less
code.

Reviewed By: patricklabatut

Differential Revision: D29322106

fbshipit-source-id: 13df13c2e48f628f75d9f44f19ff7f1646fb7ebd
---
 pytorch3d/renderer/points/pulsar/unified.py   |  52 +++---
 pytorch3d/utils/__init__.py                   |   2 +
 pytorch3d/utils/camera_conversions.py         | 171 +++++++++++++++++-
 ...loud_sphere_azimuth0.0_fovorthographic.png | Bin 1930 -> 1915 bytes
 ...ntcloud_sphere_azimuth0.0_orthographic.png | Bin 1930 -> 1915 bytes
 ...oud_sphere_azimuth90.0_fovorthographic.png | Bin 2112 -> 2104 bytes
 ...tcloud_sphere_azimuth90.0_orthographic.png | Bin 2112 -> 2104 bytes
 tests/test_camera_conversions.py              |  82 +++++++++
 8 files changed, 275 insertions(+), 32 deletions(-)

diff --git a/pytorch3d/renderer/points/pulsar/unified.py b/pytorch3d/renderer/points/pulsar/unified.py
index 5c271f34d..1b9689582 100644
--- a/pytorch3d/renderer/points/pulsar/unified.py
+++ b/pytorch3d/renderer/points/pulsar/unified.py
@@ -11,7 +11,7 @@
 import torch
 import torch.nn as nn
 
-from ....transforms import matrix_to_rotation_6d
+from ....utils import pulsar_from_cameras_projection
 from ...cameras import (
     FoVOrthographicCameras,
     FoVPerspectiveCameras,
@@ -102,7 +102,7 @@ def __init__(
             height=height,
             max_num_balls=max_num_spheres,
             orthogonal_projection=orthogonal_projection,
-            right_handed_system=True,
+            right_handed_system=False,
             n_channels=n_channels,
             **kwargs,
         )
@@ -359,24 +359,28 @@ def _extract_intrinsics(  # noqa: C901
     def _extract_extrinsics(
         self, kwargs, cloud_idx
     ) -> Tuple[torch.Tensor, torch.Tensor]:
+        """
+        Extract the extrinsic information from the kwargs for a specific point cloud.
+
+        Instead of implementing a direct translation from the PyTorch3D to the Pulsar
+        camera model, we chain the two conversions PyTorch3D->OpenCV and
+        OpenCV->Pulsar for better maintainability (PyTorch3D->OpenCV is maintained and
+        tested by the core PyTorch3D team, whereas OpenCV->Pulsar is maintained and
+        tested by the Pulsar team).
+ """ # Shorthand: cameras = self.rasterizer.cameras R = kwargs.get("R", cameras.R)[cloud_idx] T = kwargs.get("T", cameras.T)[cloud_idx] - norm_mat = torch.tensor( - [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, -1.0]], - dtype=torch.float32, - device=R.device, + tmp_cams = PerspectiveCameras( + R=R.unsqueeze(0), T=T.unsqueeze(0), device=R.device ) - cam_rot = torch.matmul(norm_mat, R[:3, :3][None, ...]).permute((0, 2, 1)) - norm_mat = torch.tensor( - [[-1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], - dtype=torch.float32, - device=R.device, + size_tensor = torch.tensor( + [[self.renderer._renderer.height, self.renderer._renderer.width]] ) - cam_rot = torch.matmul(norm_mat, cam_rot) - cam_pos = torch.flatten(torch.matmul(cam_rot, T[..., None])) - cam_rot = torch.flatten(matrix_to_rotation_6d(cam_rot)) + pulsar_cam = pulsar_from_cameras_projection(tmp_cams, size_tensor) + cam_pos = pulsar_cam[0, :3] + cam_rot = pulsar_cam[0, 3:9] return cam_pos, cam_rot def _get_vert_rad( @@ -547,15 +551,17 @@ def forward(self, point_clouds, **kwargs) -> torch.Tensor: otherargs["bg_col"] = bg_col # Go! images.append( - self.renderer( - vert_pos=vert_pos, - vert_col=vert_col, - vert_rad=vert_rad, - cam_params=cam_params, - gamma=gamma, - max_depth=zfar, - min_depth=znear, - **otherargs, + torch.flipud( + self.renderer( + vert_pos=vert_pos, + vert_col=vert_col, + vert_rad=vert_rad, + cam_params=cam_params, + gamma=gamma, + max_depth=zfar, + min_depth=znear, + **otherargs, + ) ) ) return torch.stack(images, dim=0) diff --git a/pytorch3d/utils/__init__.py b/pytorch3d/utils/__init__.py index 90e7fe8a3..ec0eeeb86 100644 --- a/pytorch3d/utils/__init__.py +++ b/pytorch3d/utils/__init__.py @@ -7,6 +7,8 @@ from .camera_conversions import ( cameras_from_opencv_projection, opencv_from_cameras_projection, + pulsar_from_opencv_projection, + pulsar_from_cameras_projection, ) from .ico_sphere import ico_sphere from .torus import torus diff --git a/pytorch3d/utils/camera_conversions.py b/pytorch3d/utils/camera_conversions.py index 866090cfb..265f68c76 100644 --- a/pytorch3d/utils/camera_conversions.py +++ b/pytorch3d/utils/camera_conversions.py @@ -4,12 +4,16 @@ # This source code is licensed under the BSD-style license found in the # LICENSE file in the root directory of this source tree. +import logging from typing import Tuple import torch from ..renderer import PerspectiveCameras -from ..transforms import so3_exp_map, so3_log_map +from ..transforms import matrix_to_rotation_6d + + +LOGGER = logging.getLogger(__name__) def cameras_from_opencv_projection( @@ -54,7 +58,6 @@ def cameras_from_opencv_projection( Returns: cameras_pytorch3d: A batch of `N` cameras in the PyTorch3D convention. """ - focal_length = torch.stack([camera_matrix[:, 0, 0], camera_matrix[:, 1, 1]], dim=-1) principal_point = camera_matrix[:, :2, 2] @@ -68,7 +71,7 @@ def cameras_from_opencv_projection( # For R, T we flip x, y axes (opencv screen space has an opposite # orientation of screen axes). # We also transpose R (opencv multiplies points from the opposite=left side). - R_pytorch3d = R.permute(0, 2, 1) + R_pytorch3d = R.clone().permute(0, 2, 1) T_pytorch3d = tvec.clone() R_pytorch3d[:, :, :2] *= -1 T_pytorch3d[:, :2] *= -1 @@ -103,20 +106,22 @@ def opencv_from_cameras_projection( cameras: A batch of `N` cameras in the PyTorch3D convention. image_size: A tensor of shape `(N, 2)` containing the sizes of the images (height, width) attached to each camera. + return_as_rotmat (bool): If set to True, return the full 3x3 rotation + matrices. 
 
     Returns:
         R: A batch of rotation matrices of shape `(N, 3, 3)`.
         tvec: A batch of translation vectors of shape `(N, 3)`.
         camera_matrix: A batch of camera calibration matrices of shape `(N, 3, 3)`.
     """
-    R_pytorch3d = cameras.R
-    T_pytorch3d = cameras.T
+    R_pytorch3d = cameras.R.clone()  # pyre-ignore
+    T_pytorch3d = cameras.T.clone()  # pyre-ignore
     focal_pytorch3d = cameras.focal_length
     p0_pytorch3d = cameras.principal_point
-    T_pytorch3d[:, :2] *= -1  # pyre-ignore
-    R_pytorch3d[:, :, :2] *= -1  # pyre-ignore
-    tvec = T_pytorch3d.clone()  # pyre-ignore
-    R = R_pytorch3d.permute(0, 2, 1)  # pyre-ignore
+    T_pytorch3d[:, :2] *= -1
+    R_pytorch3d[:, :, :2] *= -1
+    tvec = T_pytorch3d
+    R = R_pytorch3d.permute(0, 2, 1)
 
     # Retype the image_size correctly and flip to width, height.
     image_size_wh = image_size.to(R).flip(dims=(1,))
@@ -130,3 +135,151 @@
     camera_matrix[:, 0, 0] = focal_length[:, 0]
     camera_matrix[:, 1, 1] = focal_length[:, 1]
     return R, tvec, camera_matrix
+
+
+def pulsar_from_opencv_projection(
+    R: torch.Tensor,
+    tvec: torch.Tensor,
+    camera_matrix: torch.Tensor,
+    image_size: torch.Tensor,
+    znear: float = 0.1,
+) -> torch.Tensor:
+    """
+    Convert OpenCV style camera parameters to Pulsar style camera parameters.
+
+    Note:
+        * Pulsar does NOT support different focal lengths for x and y.
+          For conversion, we use the average of fx and fy.
+        * The Pulsar renderer MUST use a left-handed coordinate system for this
+          mapping to work.
+        * The resulting image will be vertically flipped - which has to be
+          addressed AFTER rendering by the user.
+        * The parameters `R, tvec, camera_matrix` correspond to the outputs
+          of `cv2.decomposeProjectionMatrix`.
+
+    Args:
+        R: A batch of rotation matrices of shape `(N, 3, 3)`.
+        tvec: A batch of translation vectors of shape `(N, 3)`.
+        camera_matrix: A batch of camera calibration matrices of shape `(N, 3, 3)`.
+        image_size: A tensor of shape `(N, 2)` containing the sizes of the images
+            (height, width) attached to each camera.
+        znear (float): The near clipping value to use for Pulsar.
+
+    Returns:
+        cameras_pulsar: A batch of `N` Pulsar camera vectors in the Pulsar
+            convention `(N, 13)` (3 translation, 6 rotation, focal_length, sensor_width,
+            c_x, c_y).
+    """
+    assert len(camera_matrix.size()) == 3, "This function requires batched inputs!"
+    assert len(R.size()) == 3, "This function requires batched inputs!"
+    assert len(tvec.size()) in (2, 3), "This function requires batched inputs!"
+
+    # Validate parameters.
+    image_size_wh = image_size.to(R).flip(dims=(1,))
+    assert torch.all(
+        image_size_wh > 0
+    ), "height and width must be positive but min is: %s" % (
+        str(image_size_wh.min().item())
+    )
+    assert (
+        camera_matrix.size(1) == 3 and camera_matrix.size(2) == 3
+    ), "Incorrect camera matrix shape: expected 3x3 but got %dx%d" % (
+        camera_matrix.size(1),
+        camera_matrix.size(2),
+    )
+    assert (
+        R.size(1) == 3 and R.size(2) == 3
+    ), "Incorrect R shape: expected 3x3 but got %dx%d" % (
+        R.size(1),
+        R.size(2),
+    )
+    if len(tvec.size()) == 2:
+        tvec = tvec.unsqueeze(2)
+    assert (
+        tvec.size(1) == 3 and tvec.size(2) == 1
+    ), "Incorrect tvec shape: expected 3x1 but got %dx%d" % (
+        tvec.size(1),
+        tvec.size(2),
+    )
+    # Check batch size.
+    batch_size = camera_matrix.size(0)
+    assert R.size(0) == batch_size, "Expected R to have batch size %d. Has size %d." % (
+        batch_size,
+        R.size(0),
+    )
+    assert (
+        tvec.size(0) == batch_size
+    ), "Expected tvec to have batch size %d. Has size %d." % (
Has size %d." % ( + batch_size, + tvec.size(0), + ) + # Check image sizes. + image_w = image_size_wh[0, 0] + image_h = image_size_wh[0, 1] + assert torch.all( + image_size_wh[:, 0] == image_w + ), "All images in a batch must have the same width!" + assert torch.all( + image_size_wh[:, 1] == image_h + ), "All images in a batch must have the same height!" + # Focal length. + fx = camera_matrix[:, 0, 0].unsqueeze(1) + fy = camera_matrix[:, 1, 1].unsqueeze(1) + # Check that we introduce less than 1% error by averaging the focal lengths. + fx_y = fx / fy + if torch.any(fx_y > 1.01) or torch.any(fx_y < 0.99): + LOGGER.warning( + "Pulsar only supports a single focal lengths. For converting OpenCV " + "focal lengths, we average them for x and y directions. " + "The focal lengths for x and y you provided differ by more than 1%, " + "which means this could introduce a noticeable error." + ) + f = (fx + fy) / 2 + # Normalize f into normalized device coordinates. + focal_length_px = f / image_w + # Transfer into focal_length and sensor_width. + focal_length = torch.tensor([znear - 1e-5], dtype=torch.float32, device=R.device) + focal_length = focal_length[None, :].repeat(batch_size, 1) + sensor_width = focal_length / focal_length_px + # Principal point. + cx = camera_matrix[:, 0, 2].unsqueeze(1) + cy = camera_matrix[:, 1, 2].unsqueeze(1) + # Transfer principal point offset into centered offset. + cx = -(cx - image_w / 2) + cy = cy - image_h / 2 + # Concatenate to final vector. + param = torch.cat([focal_length, sensor_width, cx, cy], dim=1) + R_trans = R.permute(0, 2, 1) + cam_pos = -torch.bmm(R_trans, tvec).squeeze(2) + cam_rot = matrix_to_rotation_6d(R_trans) + cam_params = torch.cat([cam_pos, cam_rot, param], dim=1) + return cam_params + + +def pulsar_from_cameras_projection( + cameras: PerspectiveCameras, + image_size: torch.Tensor, +) -> torch.Tensor: + """ + Convert PyTorch3D `PerspectiveCameras` to Pulsar style camera parameters. + + Note: + * Pulsar does NOT support different focal lengths for x and y. + For conversion, we use the average of fx and fy. + * The Pulsar renderer MUST use a left-handed coordinate system for this + mapping to work. + * The resulting image will be vertically flipped - which has to be + addressed AFTER rendering by the user. + + Args: + cameras: A batch of `N` cameras in the PyTorch3D convention. + image_size: A tensor of shape `(N, 2)` containing the sizes of the images + (height, width) attached to each camera. + + Returns: + cameras_pulsar: A batch of `N` Pulsar camera vectors in the Pulsar + convention `(N, 13)` (3 translation, 6 rotation, focal_length, sensor_width, + c_x, c_y). 
+ """ + opencv_R, opencv_T, opencv_K = opencv_from_cameras_projection(cameras, image_size) + return pulsar_from_opencv_projection(opencv_R, opencv_T, opencv_K, image_size) diff --git a/tests/data/test_pulsar_simple_pointcloud_sphere_azimuth0.0_fovorthographic.png b/tests/data/test_pulsar_simple_pointcloud_sphere_azimuth0.0_fovorthographic.png index cb6b5cec17ca25ca53641773b883a6dbd75e58e7..34c15cfdc9e26040b97136f753efadcf641438d2 100644 GIT binary patch delta 1675 zcmV;626Xv~5Bm;~B!5ClL_t(|obBDqmZT^U0MO1l{r{hw&cYgo0YuYOM7ej-r@E>i z3Iw335wAZSVVb7-*VpS6e-X>Sb5rxl=HHCZ{Vpf_bM*l{%Io!-2S@lK{?T>Eb)qEl z=QF+0$=>Eb`}cmoiyc2r)6dV-CYs_R5D0)lCBx!gn}JWg-!<=itUvW+`M~@Ao~CIY zACEWv>*@mm00000000000000000000cm`>V9%4?;Gv}p}ooYLg>@Iki-GQ}3q$|+_V(lIQXVL7tG`ztOe$QRTzw!nqs4aU z=ps0tv0Tz|s`v`CmjDPi@4_v-$}JA~k`I>v000000002sVLT7?C*li4c=v-{X2RepuJ@7&wnZ* z*Qt^=&r?D*cvF1n00`opPy8Z|6`+}`cAf&@kIUa4BzvJ|1VEWIEe#ZtxD*p4pGN?c zSqBwl%^Hp&~y(ErF`k3uAaeq#sN`X53w0ssI2000000000000000 z00000+>s2DzhoG{)>ZAVr~lXVJM#PTrBG!0lJ+0H9B(KjTC6pF#Sx7qDKr`n`ZhIQ zho0mVnY-3%DbWU%B8FZ8R_p9q1kP5km)_t2h7N#af>CCoy?>?RPR$G*00|YRdyq^? zXLpAH_^-Z`>RP>V4RE?}-)(N|XEw!c39X@39!00000007_{eLZ#be!nkyCYAxs zkKc9lLd^UizJL5YG!T*w^%4N}aW4iqQ2fr*->VO1k)6+W^VZS3Y&&$Z0wnLn73X`4 zz>=_atmimlHgA5ykn1G?;@_IRwySI%#XwOqS#%Kq^$cw~a!IkFtOKh;BAi|Vps128 zfFRE2W$1t8x81Hjj9ml7@{xk@I2lGM;zL%q0L0Qyl&CZ=V+f$ykp|8{R7h1i&$d%FelTvQ~gWg_%B#fIMsY z?MB9`WTBf+bN=RyPF4UMymQU3eIGXZ^U{>@DIapCEem`+bB*5Oz)j2CTVVt&{UguV z5hb^|ihm#}S-D-YR^qAcHs}ga+^8I~8h<=vl-Cu<_xBU_(%fWA!nmKU zu5bIce0hztrJo*a!~h7E`tsAemP_+irGC+~{rr43uJRAmnl;zZ0Z^Q2_LbHuR%w6g zpg-@t@~vrb(q}2d1wit7ZPJ(D%ss^zXgr@aQ@Q7}`$obgAE^Cp@PG8?yx|xZHh-?) zDtUnTH0NJG-TCDIT4g_ZQcDgO0LdlV;luR7BfjXU^Noji{`HA-)=B?v&*DY4*ysa} z>P%%1{!s5vsES z+}qM?$L9Wx1i-!3b2JEpRRZ9~ma-d#ukC+U34j}mUqQ~3kUlIW<4FX-y(J?)$ysX! zSfzpPZRrL7v)?6u3=MR9m7AdgV3h`Xf;)bm6 z1!%PD^@p!b)0BMrdcBg5r)m26`MKx?9w|Q9_(+qlBwz2=6T0Mb@o=9%4WlG{>n;Og zReT`v$N90}f&e&D=>B{hJSG4V^I^tI!Rz(9(ZSy4h4%0Len}TQ{xO@MSbdro0D~%? z4BTimeB}Lp=VSe;2g?WE@AouK^Z0nY>0ejB5C8xG0000000000000000DxzZ#^@pD z&dST6xE3)e1^FwoOQ z`3hjo{Kzy-T?9bhVR?xmotjH(lZCI`u25G?@f`7by}Ag1+|qSDVtFaPKPL94kOGSM59nz#pf-JxKOK z%?N-pX<8a6CUGeyNIs7MD61WO$0O&$-pgRdZ*J`8N2%Tzmx6@!99(|$r zK9g?(KroBMiWUCalo`FdZ--huf|GH)I+T}EW9z)_E>AChcL$8Kq`G$qiF)@`zBR|v z_b-#*0vdmS6=2m@0T55sNl(-}rZ~`(m4uD*`qFIo)lf@LVe@ldvgt{%ws1555 zJJ&20TZhga5CHkUW&5mCsSj@I+Q;nkk&CZ3hO>yu=>)43*xK#eli>M-`bll&7vpm{ zS3f_s^qyn5IwTlYeY*mL{ZD)KBb}K|O=r?wNt^Xl3 zBLDyZ00000000000000000000z#YjT`AdfJYhBg;disA&zazgdUkXL0FKPeL%khRn zqQzR%SG=OJB!x!fLEomv>(G;&B6HVTEhXBZQpC_Jz-paci@@3H_0k)>fT05*nP8Ne zXm5Y1xKlGj2S7r_=^i9g(%Ic10RF4*q`FpboCBOLTz8w>I=Liz=}Q|?`&#Rx2S8q| zGBmxitF?DqW|5uG zcJtQJvurzbu>vH|#TDm!i@=hwb*$$&Vm5Dn!jS7F0OC*0UfWf+j$)uFnJl^pfO>{D z9l4~~P}YG}ArVe50Z>#)7C;c^^D^{5^4o4#zl>kScK~9-ASb)^K5@JCvI4{@!Sa*! z1Ro1^k0r_bhSq_)aLSV(1xSCzP?7Ut@-m)x%*-VK4pSWc^QX^?fn==5r48>D0|MX} zLuKb&I#?^fpu$WaMnImm{B|Q_RkF~{hdF=oMh7bZ4&J%u=e`dc{dsB1_>d1d(v}52 zp1DSEap0z9?yWEamj1{yc0|c#uHp|QB`dcp)=E6J-3DC&iW`+fR)c?MjPknT`2K#v zUYa{jT?$4FfHO{5M;Q0B)%9)PmM^byw)EX&jTivIQeS?0)pBXxs?;xfwx6HR##R1? 
zTC?UFIsl3@&A!rF#VYMj9rWiNSH3k3PI@n8xBy7ruTA>$o4KbL1C8giW-8Zwc3()i zwv^o{d~E--N&wtg{0MTM zg!Ex48BZbr?kySdNzPg;z$y)NZ%YsOpZzTPV`!k;t6U5f0IM|66XY@QO^O}>00000 k00000000000Qwg451iKM_E8gwEdT%j07*qoM6N<$f~dbJDgXcg diff --git a/tests/data/test_pulsar_simple_pointcloud_sphere_azimuth0.0_orthographic.png b/tests/data/test_pulsar_simple_pointcloud_sphere_azimuth0.0_orthographic.png index cb6b5cec17ca25ca53641773b883a6dbd75e58e7..34c15cfdc9e26040b97136f753efadcf641438d2 100644 GIT binary patch delta 1675 zcmV;626Xv~5Bm;~B!5ClL_t(|obBDqmZT^U0MO1l{r{hw&cYgo0YuYOM7ej-r@E>i z3Iw335wAZSVVb7-*VpS6e-X>Sb5rxl=HHCZ{Vpf_bM*l{%Io!-2S@lK{?T>Eb)qEl z=QF+0$=>Eb`}cmoiyc2r)6dV-CYs_R5D0)lCBx!gn}JWg-!<=itUvW+`M~@Ao~CIY zACEWv>*@mm00000000000000000000cm`>V9%4?;Gv}p}ooYLg>@Iki-GQ}3q$|+_V(lIQXVL7tG`ztOe$QRTzw!nqs4aU z=ps0tv0Tz|s`v`CmjDPi@4_v-$}JA~k`I>v000000002sVLT7?C*li4c=v-{X2RepuJ@7&wnZ* z*Qt^=&r?D*cvF1n00`opPy8Z|6`+}`cAf&@kIUa4BzvJ|1VEWIEe#ZtxD*p4pGN?c zSqBwl%^Hp&~y(ErF`k3uAaeq#sN`X53w0ssI2000000000000000 z00000+>s2DzhoG{)>ZAVr~lXVJM#PTrBG!0lJ+0H9B(KjTC6pF#Sx7qDKr`n`ZhIQ zho0mVnY-3%DbWU%B8FZ8R_p9q1kP5km)_t2h7N#af>CCoy?>?RPR$G*00|YRdyq^? zXLpAH_^-Z`>RP>V4RE?}-)(N|XEw!c39X@39!00000007_{eLZ#be!nkyCYAxs zkKc9lLd^UizJL5YG!T*w^%4N}aW4iqQ2fr*->VO1k)6+W^VZS3Y&&$Z0wnLn73X`4 zz>=_atmimlHgA5ykn1G?;@_IRwySI%#XwOqS#%Kq^$cw~a!IkFtOKh;BAi|Vps128 zfFRE2W$1t8x81Hjj9ml7@{xk@I2lGM;zL%q0L0Qyl&CZ=V+f$ykp|8{R7h1i&$d%FelTvQ~gWg_%B#fIMsY z?MB9`WTBf+bN=RyPF4UMymQU3eIGXZ^U{>@DIapCEem`+bB*5Oz)j2CTVVt&{UguV z5hb^|ihm#}S-D-YR^qAcHs}ga+^8I~8h<=vl-Cu<_xBU_(%fWA!nmKU zu5bIce0hztrJo*a!~h7E`tsAemP_+irGC+~{rr43uJRAmnl;zZ0Z^Q2_LbHuR%w6g zpg-@t@~vrb(q}2d1wit7ZPJ(D%ss^zXgr@aQ@Q7}`$obgAE^Cp@PG8?yx|xZHh-?) zDtUnTH0NJG-TCDIT4g_ZQcDgO0LdlV;luR7BfjXU^Noji{`HA-)=B?v&*DY4*ysa} z>P%%1{!s5vsES z+}qM?$L9Wx1i-!3b2JEpRRZ9~ma-d#ukC+U34j}mUqQ~3kUlIW<4FX-y(J?)$ysX! zSfzpPZRrL7v)?6u3=MR9m7AdgV3h`Xf;)bm6 z1!%PD^@p!b)0BMrdcBg5r)m26`MKx?9w|Q9_(+qlBwz2=6T0Mb@o=9%4WlG{>n;Og zReT`v$N90}f&e&D=>B{hJSG4V^I^tI!Rz(9(ZSy4h4%0Len}TQ{xO@MSbdro0D~%? z4BTimeB}Lp=VSe;2g?WE@AouK^Z0nY>0ejB5C8xG0000000000000000DxzZ#^@pD z&dST6xE3)e1^FwoOQ z`3hjo{Kzy-T?9bhVR?xmotjH(lZCI`u25G?@f`7by}Ag1+|qSDVtFaPKPL94kOGSM59nz#pf-JxKOK z%?N-pX<8a6CUGeyNIs7MD61WO$0O&$-pgRdZ*J`8N2%Tzmx6@!99(|$r zK9g?(KroBMiWUCalo`FdZ--huf|GH)I+T}EW9z)_E>AChcL$8Kq`G$qiF)@`zBR|v z_b-#*0vdmS6=2m@0T55sNl(-}rZ~`(m4uD*`qFIo)lf@LVe@ldvgt{%ws1555 zJJ&20TZhga5CHkUW&5mCsSj@I+Q;nkk&CZ3hO>yu=>)43*xK#eli>M-`bll&7vpm{ zS3f_s^qyn5IwTlYeY*mL{ZD)KBb}K|O=r?wNt^Xl3 zBLDyZ00000000000000000000z#YjT`AdfJYhBg;disA&zazgdUkXL0FKPeL%khRn zqQzR%SG=OJB!x!fLEomv>(G;&B6HVTEhXBZQpC_Jz-paci@@3H_0k)>fT05*nP8Ne zXm5Y1xKlGj2S7r_=^i9g(%Ic10RF4*q`FpboCBOLTz8w>I=Liz=}Q|?`&#Rx2S8q| zGBmxitF?DqW|5uG zcJtQJvurzbu>vH|#TDm!i@=hwb*$$&Vm5Dn!jS7F0OC*0UfWf+j$)uFnJl^pfO>{D z9l4~~P}YG}ArVe50Z>#)7C;c^^D^{5^4o4#zl>kScK~9-ASb)^K5@JCvI4{@!Sa*! z1Ro1^k0r_bhSq_)aLSV(1xSCzP?7Ut@-m)x%*-VK4pSWc^QX^?fn==5r48>D0|MX} zLuKb&I#?^fpu$WaMnImm{B|Q_RkF~{hdF=oMh7bZ4&J%u=e`dc{dsB1_>d1d(v}52 zp1DSEap0z9?yWEamj1{yc0|c#uHp|QB`dcp)=E6J-3DC&iW`+fR)c?MjPknT`2K#v zUYa{jT?$4FfHO{5M;Q0B)%9)PmM^byw)EX&jTivIQeS?0)pBXxs?;xfwx6HR##R1? 
zTC?UFIsl3@&A!rF#VYMj9rWiNSH3k3PI@n8xBy7ruTA>$o4KbL1C8giW-8Zwc3()i zwv^o{d~E--N&wtg{0MTM zg!Ex48BZbr?kySdNzPg;z$y)NZ%YsOpZzTPV`!k;t6U5f0IM|66XY@QO^O}>00000 k00000000000Qwg451iKM_E8gwEdT%j07*qoM6N<$f~dbJDgXcg diff --git a/tests/data/test_pulsar_simple_pointcloud_sphere_azimuth90.0_fovorthographic.png b/tests/data/test_pulsar_simple_pointcloud_sphere_azimuth90.0_fovorthographic.png index 507fc204269ec10cf61875a1a938722094fb6fed..53ec18de39bccfc0c62392675bf6af7729b4a1b0 100644 GIT binary patch literal 2104 zcmY*ac{J1u8y-th3} zFd>Ax;2n5Yfz!=Pnw;%eQ#x^2W?75L^%%$kGqI0u9&XbeF?%XBvLk-L*NO4GYgQc3 zEpRjRuBk|;I$FJ%$qFn>RXvUUz!!*1xe6$`p4(L-cGMe)S}(IjXxKeN|3Na4c%ffi zr?=$rewj(x+jyAR1p8Y<`|Kf3JXsA6gY!q_hPbOq(p{GU_HCnt#oX1uIP#eg)A8p-MKA|B})t}j~ zZ-LcAWMMkSLF@(B98-&TkDm5P2>Xs<2uyKprRxIRX>NPhmuF1lt4oALymt&cl9e+$PUb%tIzgaN+9X_+@hOBQjOLWqN|_k5nu>-!){k6-o@QjsVZefC4}w=cSJC^il;$ zX`$nwWjw=@lQFw2qIBz+w!d#TIy?T|*KoW-or&0kxCL786=`K-r6>llvigQ$_x>4!Mc%4aik?w1`s7QL&I; z_J3nV*6z;tQ3uu)EayjkgP_-nIDUxY7stvl>G1SHr`BjqR(8)VWPFF+4RhZcOvbUM z1Z4Kilj-R2os&USlR#qX#wg_YO-`SoK%e{%`+rP+P={*!uDG=}p{jzT__r~{foG8W zm*2;hpr7y_>F)}F4d>Nu^qw%jAf2Ct7_R+zreJD|`=XJ$vEn|24NY)%F6F-CJF2o+ zTkrnCtYazqI<%xxZHEe zPeRupzCB8t*w_QcoQ&2;8sKryf(!s(k?j|;k~+V|{Y#1snx9bc zJs_)IotT1J+FN#fU%EV;H~M=#;EbYfC5iW-_65 z)kk|_LbU|`MHg*pyH}jQs1ve5?vmcupyy2CY{F@PU@^nZ4}iN;f5zGDsGkxmmKu=pi4m|cNGdJ zoZ?o`;Ju>V@tlV+u|u22BP@${R*m=nm*DhMb@q~76T?Ii=5v00oyPLcWzkcV3g(}t zC-6u3$Obpv*S6yE!-Od5;(U=>pv`x+F!PdGcG1 z(rn2Q^MZIbi7YCyayMWjt{Zrz<=HeIAY`=t~ zBbf2VXOwo8DvR8Kmsah_KPmKA+*yBu957CQ!?o%MmFId3>PlPXKR8{|xmJ##Pj#A# z;kha57QQCct~w?pDFyT*?(Cf5m3auA8!M>$%1G;wR2VlN@F}*es|avp@s++!Yrv|F zreD{4u}WS~13pWe@TfSpM(CeUQ<$h!T{Gi{Cu4kZK8pq1ejCD+TDwHq%&m$#S%EOA zdRvqOUBMRC5VzLsNo>54@#)KSG{L2jKgW=@044GVqb0%^E*dSk9VJ^5hsf^^R=0FB z@A&4$J@tUOIek$RcQ+$5Si9-Djv8fX@N#e#qfK`rV#}V*4%r6!J4e;1q|Y+4zCc~7 zQ4*BLvE-djb_uvp^k+gXi0Oo;)4LYeQ9HHvH^I#qix^)73 zK=A78A~DjOOW6Id_RVGJmhm9WwR!~VcFH53GQ+$-a8_WCHeIZw#242%L~FgmUbh_> zM^GdO+{`{0qD?9l@ngaA4t@b|tBP*l)nWhq|G$B=C3J%aL1hgc3G$ z&1N~RNX5#SliQffvSBXUe&=~TuUF4`J-GJ<|M?J=dz9}V0A!?HogDm9 zOP0oI_(}_9h&Ll@goojQ(5%&a2MA9!~fkkG((k0Cik;>OT&Fg9Q7H)-}L4}Z{SGEe|QH>m9CMV^9Rv}obF6*rz z*vx~gQ#RQ@>Ks>Dc1?mce4OXXwLYM(HJBdEu`Ihs5~(bl#KHv^i%~}T^oUpHheHCM z!Qew&>Z(=%#yh6&9+E~!SJjh_3tI#*2E!M%&nxFB_ITJ4R9Z&b1&+1Es>ZIh{(!j7 zq0jo>(3lk=5@*lg297sJVwthH@l~6E_3`%-!IQ`s@o5`#g7Ikzvt$uypSx>waGZKa zSD2K^e^VS$W|XNFjB+lVP+@a4d9-3tfvML#4sYM~lmOVOSmzm1#!4-3+Xdz8ROAdW$=_&&x@fNdwjwb6=BOIy z&s&&bj#$cGW%u|QKM|BT@BfDxI|U-) zHrpV*#SRdKjAu8l=lRDZHdza(r4c5k1oEgl;#=fZ9(5bqPn548;%%m@Pjoi8+TnC1 zGy%6?w?*`7MDcJ#7KU+!&X(hDUMzD^(jUxT@ff#|_ASAFxKVAE$a)fc^`()Ou;Llw zG1iu_1iGdTvKWLaAmO^N?{hFzT>SxxF=jliqXR=3oIranVIRLwBf9)Lq)7Jmys-ah z#BTJ=cu{C9hld*j?WsQ8w`k^)J^KmJ+5uc8?B>@Lh^{GEb^6quD|qM=^gGA`WRbiP zuVp8^`~yqFK4xGt&62EtWBXO`HLWId4g-z5AV3O z3a3nTFW}abu(Dbr;vqpWAPg(kew^Fxm~#FM;Q8X%k>v_W z%+89MC@Xe5;Y7#1z=ed#9slV4@1!?clLLPtUYXsR@Dd>edB3%!CBkERmUAwhZ7F4J zI~x%WdoW<=8OW)3DKLhqv@Le14YQN)cJ3_R&kQvB+Pb^St1VE!dTXHs4zEI-%V=}% z240mN_|jgPkk@=a_~DNhw(-2l?_#-znN|+py)_0QeXs<#4}sgADgzhE3j^*?1A`+~ z`yI5Wbh)-YmI9g>8uhW*v-nCzV(s~6kJY5?2*;T=Xuaq)BWNu^WS{+!ZFf@Gihx>2^v0RIp*h50OOe-h;awSM{c`P8<1Z^5$@us9?Ve~1IOuOyU}%(FU-1iTY9;XcyAhqFN+>xw3}d8Y zqCoP<2(3tTCV6ySv@JLap64GANNFAY3l(JiSbYd50G2g&fN# z$ZHw)BVE26OUe3lf;`Uo13UM6uLvpY_sBu%#0~xAKolB0_{92CM@6iq^CAYXWaybR zh52mDR6eVx+jlzoBj9BHb!-0dWDN})8!9gNK@dm79G2Ac4KLituY+kkDv4WOW{oZo zhJAtq#Pd^SouS-P-a{O9lBnT@I&(<;r`Mm5@$qy=B1N!$s)>Vzfs1eU`pZb9>8#)$l7{8HsW86LYHp(J76Vc$~63@!CP^WW+v-&$M2Pd2P}@;oNMeodV#Eu?%K6@V>QqIGjGi` zCWbUr6X)k%x4hlpZzBf3nawY}p5a?*2jEH^Q^LaY)+Ti)H(DeupDcHI6v<`V{zs8Z 
zB4<-;O+rOCiwuiQbCeWu2ORX*@2Sl_ZIAQnbImaml1O4< zH7l~SXIc*LjF2KSBejq_Ws!;=VzREu&(}l+dvlCicT-y<9X#}n|-C#{qJDp pl??f5to5{G*WCYBYk05SCcgd%x_8$!1R@84xH@|}HJlF5`Ws);45$DA diff --git a/tests/data/test_pulsar_simple_pointcloud_sphere_azimuth90.0_orthographic.png b/tests/data/test_pulsar_simple_pointcloud_sphere_azimuth90.0_orthographic.png index 507fc204269ec10cf61875a1a938722094fb6fed..53ec18de39bccfc0c62392675bf6af7729b4a1b0 100644 GIT binary patch literal 2104 zcmY*ac{J1u8y-th3} zFd>Ax;2n5Yfz!=Pnw;%eQ#x^2W?75L^%%$kGqI0u9&XbeF?%XBvLk-L*NO4GYgQc3 zEpRjRuBk|;I$FJ%$qFn>RXvUUz!!*1xe6$`p4(L-cGMe)S}(IjXxKeN|3Na4c%ffi zr?=$rewj(x+jyAR1p8Y<`|Kf3JXsA6gY!q_hPbOq(p{GU_HCnt#oX1uIP#eg)A8p-MKA|B})t}j~ zZ-LcAWMMkSLF@(B98-&TkDm5P2>Xs<2uyKprRxIRX>NPhmuF1lt4oALymt&cl9e+$PUb%tIzgaN+9X_+@hOBQjOLWqN|_k5nu>-!){k6-o@QjsVZefC4}w=cSJC^il;$ zX`$nwWjw=@lQFw2qIBz+w!d#TIy?T|*KoW-or&0kxCL786=`K-r6>llvigQ$_x>4!Mc%4aik?w1`s7QL&I; z_J3nV*6z;tQ3uu)EayjkgP_-nIDUxY7stvl>G1SHr`BjqR(8)VWPFF+4RhZcOvbUM z1Z4Kilj-R2os&USlR#qX#wg_YO-`SoK%e{%`+rP+P={*!uDG=}p{jzT__r~{foG8W zm*2;hpr7y_>F)}F4d>Nu^qw%jAf2Ct7_R+zreJD|`=XJ$vEn|24NY)%F6F-CJF2o+ zTkrnCtYazqI<%xxZHEe zPeRupzCB8t*w_QcoQ&2;8sKryf(!s(k?j|;k~+V|{Y#1snx9bc zJs_)IotT1J+FN#fU%EV;H~M=#;EbYfC5iW-_65 z)kk|_LbU|`MHg*pyH}jQs1ve5?vmcupyy2CY{F@PU@^nZ4}iN;f5zGDsGkxmmKu=pi4m|cNGdJ zoZ?o`;Ju>V@tlV+u|u22BP@${R*m=nm*DhMb@q~76T?Ii=5v00oyPLcWzkcV3g(}t zC-6u3$Obpv*S6yE!-Od5;(U=>pv`x+F!PdGcG1 z(rn2Q^MZIbi7YCyayMWjt{Zrz<=HeIAY`=t~ zBbf2VXOwo8DvR8Kmsah_KPmKA+*yBu957CQ!?o%MmFId3>PlPXKR8{|xmJ##Pj#A# z;kha57QQCct~w?pDFyT*?(Cf5m3auA8!M>$%1G;wR2VlN@F}*es|avp@s++!Yrv|F zreD{4u}WS~13pWe@TfSpM(CeUQ<$h!T{Gi{Cu4kZK8pq1ejCD+TDwHq%&m$#S%EOA zdRvqOUBMRC5VzLsNo>54@#)KSG{L2jKgW=@044GVqb0%^E*dSk9VJ^5hsf^^R=0FB z@A&4$J@tUOIek$RcQ+$5Si9-Djv8fX@N#e#qfK`rV#}V*4%r6!J4e;1q|Y+4zCc~7 zQ4*BLvE-djb_uvp^k+gXi0Oo;)4LYeQ9HHvH^I#qix^)73 zK=A78A~DjOOW6Id_RVGJmhm9WwR!~VcFH53GQ+$-a8_WCHeIZw#242%L~FgmUbh_> zM^GdO+{`{0qD?9l@ngaA4t@b|tBP*l)nWhq|G$B=C3J%aL1hgc3G$ z&1N~RNX5#SliQffvSBXUe&=~TuUF4`J-GJ<|M?J=dz9}V0A!?HogDm9 zOP0oI_(}_9h&Ll@goojQ(5%&a2MA9!~fkkG((k0Cik;>OT&Fg9Q7H)-}L4}Z{SGEe|QH>m9CMV^9Rv}obF6*rz z*vx~gQ#RQ@>Ks>Dc1?mce4OXXwLYM(HJBdEu`Ihs5~(bl#KHv^i%~}T^oUpHheHCM z!Qew&>Z(=%#yh6&9+E~!SJjh_3tI#*2E!M%&nxFB_ITJ4R9Z&b1&+1Es>ZIh{(!j7 zq0jo>(3lk=5@*lg297sJVwthH@l~6E_3`%-!IQ`s@o5`#g7Ikzvt$uypSx>waGZKa zSD2K^e^VS$W|XNFjB+lVP+@a4d9-3tfvML#4sYM~lmOVOSmzm1#!4-3+Xdz8ROAdW$=_&&x@fNdwjwb6=BOIy z&s&&bj#$cGW%u|QKM|BT@BfDxI|U-) zHrpV*#SRdKjAu8l=lRDZHdza(r4c5k1oEgl;#=fZ9(5bqPn548;%%m@Pjoi8+TnC1 zGy%6?w?*`7MDcJ#7KU+!&X(hDUMzD^(jUxT@ff#|_ASAFxKVAE$a)fc^`()Ou;Llw zG1iu_1iGdTvKWLaAmO^N?{hFzT>SxxF=jliqXR=3oIranVIRLwBf9)Lq)7Jmys-ah z#BTJ=cu{C9hld*j?WsQ8w`k^)J^KmJ+5uc8?B>@Lh^{GEb^6quD|qM=^gGA`WRbiP zuVp8^`~yqFK4xGt&62EtWBXO`HLWId4g-z5AV3O z3a3nTFW}abu(Dbr;vqpWAPg(kew^Fxm~#FM;Q8X%k>v_W z%+89MC@Xe5;Y7#1z=ed#9slV4@1!?clLLPtUYXsR@Dd>edB3%!CBkERmUAwhZ7F4J zI~x%WdoW<=8OW)3DKLhqv@Le14YQN)cJ3_R&kQvB+Pb^St1VE!dTXHs4zEI-%V=}% z240mN_|jgPkk@=a_~DNhw(-2l?_#-znN|+py)_0QeXs<#4}sgADgzhE3j^*?1A`+~ z`yI5Wbh)-YmI9g>8uhW*v-nCzV(s~6kJY5?2*;T=Xuaq)BWNu^WS{+!ZFf@Gihx>2^v0RIp*h50OOe-h;awSM{c`P8<1Z^5$@us9?Ve~1IOuOyU}%(FU-1iTY9;XcyAhqFN+>xw3}d8Y zqCoP<2(3tTCV6ySv@JLap64GANNFAY3l(JiSbYd50G2g&fN# z$ZHw)BVE26OUe3lf;`Uo13UM6uLvpY_sBu%#0~xAKolB0_{92CM@6iq^CAYXWaybR zh52mDR6eVx+jlzoBj9BHb!-0dWDN})8!9gNK@dm79G2Ac4KLituY+kkDv4WOW{oZo zhJAtq#Pd^SouS-P-a{O9lBnT@I&(<;r`Mm5@$qy=B1N!$s)>Vzfs1eU`pZb9>8#)$l7{8HsW86LYHp(J76Vc$~63@!CP^WW+v-&$M2Pd2P}@;oNMeodV#Eu?%K6@V>QqIGjGi` zCWbUr6X)k%x4hlpZzBf3nawY}p5a?*2jEH^Q^LaY)+Ti)H(DeupDcHI6v<`V{zs8Z zB4<-;O+rOCiwuiQbCeWu2ORX*@2Sl_ZIAQnbImaml1O4< 
zH7l~SXIc*LjF2KSBejq_Ws!;=VzREu&(}l+dvlCicT-y<9X#}n|-C#{qJDp pl??f5to5{G*WCYBYk05SCcgd%x_8$!1R@84xH@|}HJlF5`Ws);45$DA diff --git a/tests/test_camera_conversions.py b/tests/test_camera_conversions.py index cacf3487f..b6841e2aa 100644 --- a/tests/test_camera_conversions.py +++ b/tests/test_camera_conversions.py @@ -12,10 +12,12 @@ import torch from common_testing import TestCaseMixin, get_tests_dir from pytorch3d.ops import eyes +from pytorch3d.renderer.points.pulsar import Renderer as PulsarRenderer from pytorch3d.transforms import so3_exp_map, so3_log_map from pytorch3d.utils import ( cameras_from_opencv_projection, opencv_from_cameras_projection, + pulsar_from_opencv_projection, ) @@ -111,6 +113,9 @@ def test_opencv_conversion(self): [105.0, 105.0], [120.0, 120.0], ] + # These values are in y, x format, but they should be in x, y format. + # The tests work like this because they only test for consistency, + # but this format is misleading. principal_point = [ [240, 320], [240.5, 320.3], @@ -160,3 +165,80 @@ def test_opencv_conversion(self): self.assertClose(R, R_i) self.assertClose(tvec, tvec_i) self.assertClose(camera_matrix, camera_matrix_i) + + def test_pulsar_conversion(self): + """ + Tests that the cameras converted from opencv to pulsar convention + return correct projections of random 3D points. The check is done + against a set of results precomputed using `cv2.projectPoints` function. + """ + image_size = [[480, 640]] + R = [ + [ + [1.0, 0.0, 0.0], + [0.0, 1.0, 0.0], + [0.0, 0.0, 1.0], + ], + [ + [0.1968, -0.6663, -0.7192], + [0.7138, -0.4055, 0.5710], + [-0.6721, -0.6258, 0.3959], + ], + ] + tvec = [ + [10.0, 10.0, 3.0], + [-0.0, -0.0, 20.0], + ] + focal_length = [ + [100.0, 100.0], + [10.0, 10.0], + ] + principal_point = [ + [320, 240], + [320, 240], + ] + + principal_point, focal_length, R, tvec, image_size = [ + torch.FloatTensor(x) + for x in (principal_point, focal_length, R, tvec, image_size) + ] + camera_matrix = eyes(dim=3, N=2) + camera_matrix[:, 0, 0] = focal_length[:, 0] + camera_matrix[:, 1, 1] = focal_length[:, 1] + camera_matrix[:, :2, 2] = principal_point + rvec = so3_log_map(R) + pts = torch.tensor( + [[[0.0, 0.0, 120.0]], [[0.0, 0.0, 120.0]]], dtype=torch.float32 + ) + radii = torch.tensor([[1e-5], [1e-5]], dtype=torch.float32) + col = torch.zeros((2, 1, 1), dtype=torch.float32) + + # project the 3D points with the opencv projection function + pts_proj_opencv = cv2_project_points(pts, rvec, tvec, camera_matrix) + pulsar_cam = pulsar_from_opencv_projection( + R, tvec, camera_matrix, image_size, znear=100.0 + ) + pulsar_rend = PulsarRenderer( + 640, 480, 1, right_handed_system=False, n_channels=1 + ) + rendered = torch.flip( + pulsar_rend( + pts, + col, + radii, + pulsar_cam, + 1e-5, + max_depth=150.0, + min_depth=100.0, + ), + dims=(1,), + ) + for batch_id in range(2): + point_pos = torch.where(rendered[batch_id] == rendered[batch_id].min()) + point_pos = point_pos[1][0], point_pos[0][0] + self.assertLess( + torch.abs(point_pos[0] - pts_proj_opencv[batch_id, 0, 0]), 2 + ) + self.assertLess( + torch.abs(point_pos[1] - pts_proj_opencv[batch_id, 0, 1]), 2 + )
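
Editor's note (not part of the patch): the snippet below is a minimal usage
sketch of the new conversion chain, assuming the patch has been applied. The
camera setup, image size, and variable names (`cams`, `image_size`,
`cam_params`) are illustrative only; the function signature and the `(N, 13)`
output layout follow the docstrings above.

    import torch

    from pytorch3d.renderer import PerspectiveCameras
    from pytorch3d.utils import pulsar_from_cameras_projection

    # One camera in the PyTorch3D convention: identity rotation, translated
    # 5 units along +z so that the world origin lies in front of the camera.
    cams = PerspectiveCameras(
        R=torch.eye(3)[None],
        T=torch.tensor([[0.0, 0.0, 5.0]]),
    )
    # Image sizes are passed as (height, width), one row per camera. A square
    # image keeps fx == fy, so the focal-length averaging warning stays silent.
    image_size = torch.tensor([[512, 512]])
    # The result is an (N, 13) tensor per the docstrings: 3 translation
    # components, a 6D rotation representation, then focal_length,
    # sensor_width, c_x, c_y.
    cam_params = pulsar_from_cameras_projection(cams, image_size)
    print(cam_params.shape)  # torch.Size([1, 13])

As the docstrings note, a Pulsar renderer consuming these parameters must be
constructed with `right_handed_system=False`, and the rendered image has to be
flipped vertically afterwards (e.g. via `torch.flipud`), which is exactly what
the `unified.py` change above does.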