// catchCalibHandEye.cpp
/*
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Eye-in-hand calibration test to estimate hand to eye transformation.
 */

#include <iostream>

#include <visp3/core/vpConfig.h>

#if defined(VISP_HAVE_CATCH2)

#include <visp3/core/vpExponentialMap.h>
#include <visp3/vision/vpHandEyeCalibration.h>

#include <catch_amalgamated.hpp>

#if defined(ENABLE_VISP_NAMESPACE)
using namespace VISP_NAMESPACE_NAME;
#endif

55bool homogeneous_equal(const vpHomogeneousMatrix &M1, const vpHomogeneousMatrix &M2)
56{
57 bool equal = true;
62 for (unsigned int i = 0; i < 3; ++i) {
63 if (!vpMath::equal(t1[i], t2[i])) {
64 std::cout << "Error: Translation " << i << " differ " << std::endl;
65 equal = false;
66 }
67 if (!vpMath::equal(tu1[i], tu2[i])) {
68 std::cout << "Error: Theta-u axis-angle rotation " << i << " differ" << std::endl;
69 equal = false;
70 }
71 }
72 return equal;
73}
75SCENARIO("Eye-in-hand calibration", "[eye-in-hand]")
76{
77 GIVEN("Eye-in-hand data")
78 {
79 std::cout << "-- First part: Eye-in-hand configuration -- " << std::endl;
80 // We want to calibrate the hand-eye extrinsic camera parameters from 6 couple of poses: cMo and rMe
81 const unsigned int N = 6;
82 // Input: six couple of poses used as input in the calibration process
83 // - eye (camera) to object transformation. The object frame is attached to the calibration grid
84 std::vector<vpHomogeneousMatrix> cMo(N);
85 // - robot reference to hand (end-effector) transformation
86 std::vector<vpHomogeneousMatrix> rMe(N);
87 // Output: Result of the calibration
88 // - ground truth hand (end-effector) to eye (camera) transformation
90 // - ground truth robot reference to object transformation
92
93 // Initialize eMc and rMo transformations used to produce the simulated input transformations cMo and rMe
94 vpTranslationVector etc_gt(0.1, 0.2, 0.3);
95 vpThetaUVector erc_gt;
96 erc_gt[0] = vpMath::rad(10); // 10 deg
97 erc_gt[1] = vpMath::rad(-10); // -10 deg
98 erc_gt[2] = vpMath::rad(25); // 25 deg
99
100 eMc_gt.buildFrom(etc_gt, erc_gt);
101 std::cout << "Ground truth hand-eye transformation: eMc " << std::endl;
102 std::cout << eMc_gt << std::endl;
103 std::cout << "Theta U rotation: "
104 << vpMath::deg(erc_gt[0]) << " "
105 << vpMath::deg(erc_gt[1]) << " "
106 << vpMath::deg(erc_gt[2])
107 << std::endl;
108
109 vpTranslationVector rto_gt(-0.1, 0.2, 0.5);
110 vpThetaUVector rwo_gt;
111 rwo_gt[0] = vpMath::rad(-10); // -10 deg
112 rwo_gt[1] = vpMath::rad(10); // 10 deg
113 rwo_gt[2] = vpMath::rad(30); // 30 deg
114
115 rMo_gt.buildFrom(rto_gt, rwo_gt);
116 std::cout << "Ground truth robot reference-object: rMo " << std::endl;
117 std::cout << rMo_gt << std::endl;
118 std::cout << "Theta U rotation: "
119 << vpMath::deg(rwo_gt[0]) << " "
120 << vpMath::deg(rwo_gt[1]) << " "
121 << vpMath::deg(rwo_gt[2])
122 << std::endl;
123
124 vpColVector v_e(6); // end effector velocity used to produce 6 simulated poses
125 for (unsigned int i = 0; i < N; ++i) {
126 v_e = 0;
127 if (i == 0) {
128 // Initialize first poses
129 rMe[0].buildFrom(0, 0, 0, 0, 0, 0); // Id
130 }
131 else if (i == 1) {
132 v_e[3] = M_PI / 8;
133 }
134 else if (i == 2) {
135 v_e[4] = M_PI / 8;
136 }
137 else if (i == 3) {
138 v_e[5] = M_PI / 10;
139 }
140 else if (i == 4) {
141 v_e[0] = 0.5;
142 }
143 else if (i == 5) {
144 v_e[1] = 0.8;
145 }
146
147 vpHomogeneousMatrix eMe; // end effector displacement
148 eMe = vpExponentialMap::direct(v_e); // Compute the end effectot displacement
149 // due to the velocity applied to
150 // the end effector
151 if (i > 0) {
152 // From the end effector displacement eMe, compute the rMe matrix
153 rMe[i] = rMe[i - 1] * eMe;
154 }
155 // Deduce the cMo corresponding matrix
156 cMo[i] = eMc_gt.inverse() * rMe[i].inverse() * rMo_gt;
157 }
158
159 if (0) {
160 for (unsigned int i = 0; i < N; ++i) {
162 rMo = rMe[i] * eMc_gt * cMo[i];
163 std::cout << std::endl << "rMo[" << i << "] " << std::endl;
164 std::cout << rMo << std::endl;
165 std::cout << "cMo[" << i << "] " << std::endl;
166 std::cout << cMo[i] << std::endl;
167 std::cout << "rMe[" << i << "] " << std::endl;
168 std::cout << rMe[i] << std::endl;
169 }
170 }
171
172 WHEN("Estimating eMc and rMo")
173 {
174 // robot end-effector to camera frames transformation to estimate
176 // Robot reference to object transformation frames to estimate
178
179 // Compute eMc and rMo from six poses
180 // - cMo[6]: camera to object poses as six homogeneous transformations
181 // - rMe[6]: robot reference to hand (end-effector) poses as six homogeneous transformations
182 CHECK(vpHandEyeCalibration::calibrate(cMo, rMe, eMc, rMo) == 0);
183 CHECK(homogeneous_equal(eMc, eMc_gt));
184 CHECK(homogeneous_equal(rMo, rMo_gt));
185 }
186 WHEN("Estimating eMc")
187 {
188 // robot end-effector to camera frames transformation to estimate
190
191 // Compute eMc hand to eye transformation from six poses
192 // - cMo[6]: camera to object poses as six homogeneous transformations
193 // - rMe[6]: robot reference to hand (end-effector) poses as six homogeneous transformations
194 CHECK(vpHandEyeCalibration::calibrate(cMo, rMe, eMc) == 0);
195 CHECK(homogeneous_equal(eMc, eMc_gt));
196 }
197 }
198}
200SCENARIO("Eye-to-hand calibration", "[eye-to-hand]")
201{
202 GIVEN("Eye-to-hand data")
203 {
204 std::cout << "\n-- Second part: Eye-to-hand configuration -- " << std::endl;
205 // We want to calibrate the transformation from the robot reference frame
206 // to the camera frame rMc using an object rigidly attached to the
207 // end effector that is observed by the camera, as well as the
208 // transformation eMo from the end effector frame to the camera frame
209 // using 6 couples of poses: cMo and rMe. This corresponds to the eye-to-hand
210 // configuration
211 const unsigned int N = 6;
212 // Input: six couple of poses used as input in the calibration process
213 std::vector<vpHomogeneousMatrix> oMc(N); //object to camera transformation. The object
214 // frame is attached to the calibration grid
215
216 std::vector<vpHomogeneousMatrix> rMe(N); // robot reference to end-effector transformation
217 // Output: Result of the calibration
218 vpHomogeneousMatrix eMo_gt; // end-effector to object transformation
219 vpHomogeneousMatrix rMc_gt; // robot reference to camera transformation
220
221 // Initialize eMo and rMc transformations used to produce the simulated input
222 // transformations cMo and rMe
223 vpTranslationVector eto_gt(0.2, -0.1, 0.15);
224 vpThetaUVector ero_gt;
225 ero_gt[0] = vpMath::rad(-15); // -15 deg
226 ero_gt[1] = vpMath::rad(10); // 10 deg
227 ero_gt[2] = vpMath::rad(-25); // -25 deg
228
229 eMo_gt.buildFrom(eto_gt, ero_gt);
230 std::cout << "Ground truth end effector to objet transformation : eMo " << std::endl;
231 std::cout << eMo_gt << std::endl;
232 std::cout << "Theta U rotation: " << vpMath::deg(ero_gt[0]) << " " << vpMath::deg(ero_gt[1]) << " " << vpMath::deg(ero_gt[2])
233 << std::endl;
234
235 vpTranslationVector rtc_gt(1.0, 0.0, 0.0);
236 vpThetaUVector rrc_gt;
237 rrc_gt[0] = vpMath::rad(10); // 10 deg
238 rrc_gt[1] = vpMath::rad(20); // 20 deg
239 rrc_gt[2] = vpMath::rad(90); // 90 deg
240
241 rMc_gt.buildFrom(rtc_gt, rrc_gt);
242 std::cout << "Ground truth robot reference to camera transformation: rMc " << std::endl;
243 std::cout << rMc_gt << std::endl;
244 std::cout << "Theta U rotation: " << vpMath::deg(rrc_gt[0]) << " " << vpMath::deg(rrc_gt[1]) << " " << vpMath::deg(rrc_gt[2])
245 << std::endl;
246
247 vpColVector v_e(6); // end-effector velocity used to produce 6 simulated poses
248 for (unsigned int i = 0; i < N; ++i) {
249 v_e = 0;
250 if (i == 0) {
251 // Initialize first poses
252 rMe[0].buildFrom(0.1, -0.1, 1.0, 0.1, -0.2, 0.3); // general pose
253 }
254 else if (i == 1) {
255 v_e[3] = M_PI / 4;
256 }
257 else if (i == 2) {
258 v_e[4] = -M_PI / 8;
259 }
260 else if (i == 3) {
261 v_e[5] = M_PI / 3;
262 }
263 else if (i == 4) {
264 v_e[0] = -0.5;
265 }
266 else if (i == 5) {
267 v_e[1] = 0.4;
268 }
269
270 vpHomogeneousMatrix eMe; // end-effector displacement
271 eMe = vpExponentialMap::direct(v_e); // Compute the end effector displacement
272 // due to the velocity applied to
273 // the end effector
274 if (i > 0) {
275 // From the end effector displacement eMe, compute the rMe matrix
276 rMe[i] = rMe[i - 1] * eMe;
277 }
278 // Deduce the cMo and oMc matrices
279 vpHomogeneousMatrix cMo = rMc_gt.inverse() * rMe[i] * eMo_gt;
280 oMc[i] = cMo.inverse();
281 }
282
283 if (0) {
284 for (unsigned int i = 0; i < N; ++i) {
285 std::cout << "rMe[" << i << "] " << std::endl;
286 std::cout << rMe[i] << std::endl;
287 std::cout << "cMo[" << i << "] " << std::endl;
288 std::cout << oMc[i].inverse() << std::endl;
289 }
290 }
291
292 WHEN("Estimating eMo and rMc")
293 {
294 // Robot end-effector to object frames transformation to estimate
296 // Robot reference to camera frames transformation to estimate
298
299 // Compute eMo and rMc from six poses
300 // - cMo[6]: camera to object poses as six homogeneous transformations
301 // - rMe[6]: robot reference to hand (end-effector) poses as six homogeneous transformations
302 CHECK(vpHandEyeCalibration::calibrate(oMc, rMe, eMo, rMc) == 0);
303 CHECK(homogeneous_equal(eMo, eMo_gt));
304 CHECK(homogeneous_equal(rMc, rMc_gt));
305 }
306 WHEN("Estimating eMo")
307 {
308 // Robot end-effector to object frames transformation to estimate
310
311 // Compute eMo hand to object transformation from six poses
312 // - cMo[6]: camera to object poses as six homogeneous transformations
313 // - rMe[6]: robot reference to hand (end-effector) poses as six homogeneous transformations
314 CHECK(vpHandEyeCalibration::calibrate(oMc, rMe, eMo) == 0);
315 CHECK(homogeneous_equal(eMo, eMo_gt));
316 }
317 }
318}
320int main(int argc, char *argv[])
321{
322 Catch::Session session;
323 session.applyCommandLine(argc, argv);
324 int numFailed = session.run();
325 return numFailed;
326}

#else
// Fallback entry point when ViSP is built without Catch2: report and exit
// successfully (implicit return 0) so the test is skipped, not failed.
int main()
{
  std::cout << "This test needs catch2 that is not enabled..." << std::endl;
}
#endif