| 1 | +"""Set of unit tests for testing inference example for CosyPose.""" |

import numpy as np
import pandas as pd
import pinocchio as pin
import pytest
import torch

from happypose.pose_estimators.cosypose.cosypose.config import LOCAL_DATA_DIR
from happypose.pose_estimators.megapose.inference.icp_refiner import ICPRefiner
from happypose.toolbox.datasets.bop_object_datasets import (
    RigidObject,
    RigidObjectDataset,
)
from happypose.toolbox.inference.example_inference_utils import load_observation_example
from happypose.toolbox.inference.types import ObservationTensor, PoseEstimatesType
from happypose.toolbox.lib3d.rigid_mesh_database import MeshDataBase
from happypose.toolbox.renderer.panda3d_batch_renderer import (
    Panda3dBatchRenderer,
)


class TestCosyPoseInference:
    """Unit tests for the CosyPose inference example."""

    @pytest.fixture(autouse=True)
    def setUp(self) -> None:
        """Set up the example observation, object dataset and mesh database."""
        self.expected_object_label = "hope-obj_000002"
        mesh_file_name = "hope-obj_000002.ply"
        data_dir = LOCAL_DATA_DIR / "examples" / "barbecue-sauce"
        mesh_dir = data_dir / "meshes"
        mesh_path = mesh_dir / mesh_file_name

        self.coarse_run_id = "coarse-bop-hope-pbr--225203"
        self.refiner_run_id = "refiner-bop-hope-pbr--955392"

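        # Load the RGB-D observation and camera intrinsics shipped with the example.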
        rgb, depth, camera_data = load_observation_example(data_dir, load_depth=True)
        self.observation = ObservationTensor.from_numpy(
            rgb, depth=depth, K=camera_data.K
        )

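        # Object dataset containing only the expected HOPE object mesh (in mm).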
        self.object_dataset = RigidObjectDataset(
            objects=[
                RigidObject(
                    label=self.expected_object_label,
                    mesh_path=mesh_path,
                    mesh_units="mm",
                )
            ]
        )
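        # Batched mesh database (on CPU) that is passed to the ICP refiner.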
        mesh_db = MeshDataBase.from_object_ds(self.object_dataset)
        self.mesh_db_batched = mesh_db.batched().cpu()

    def test_icp_refiner(self):
        """Check that ICP depth refinement stays close to the input pose."""
        renderer = Panda3dBatchRenderer(
            self.object_dataset,
            n_workers=1,
            preload_cache=False,
        )

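        # ICP-based depth refiner built from the batched mesh database and renderer.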
        depth_refiner = ICPRefiner(self.mesh_db_batched, renderer)

        # Hardcoded pose obtained from the output of a single RGB prediction.
        T_in = pin.SE3(
            pin.exp3(np.array([1.4, 1.6, -1.11])),
            np.array([0.1, 0.07, 0.45]),
        )

        poses_in = torch.from_numpy(T_in.homogeneous)[np.newaxis, :, :]
        preds_cosy = PoseEstimatesType(
            infos=pd.DataFrame(
                {"batch_im_id": [0], "label": ["hope-obj_000002"], "score": [1.0]}
            ),
            poses=poses_in,
            poses_input=poses_in,
        )

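        # Refine the input pose using the observed depth map and camera intrinsics.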
        preds, _ = depth_refiner.refine_poses(
            predictions=preds_cosy, depth=self.observation.depth, K=self.observation.K
        )

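        # The refined pose should stay within 0.1 of the input pose in SE(3) log norm.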
        T_est = pin.SE3(preds.poses[0].numpy())
        diff = T_est.inverse() * T_in
        assert np.linalg.norm(pin.log6(diff).vector) < 0.1