Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
mbot-apriltag-2D-half-vs.cpp
1
2#include <visp3/core/vpConfig.h>
3#include <visp3/core/vpPolygon.h>
4#include <visp3/core/vpSerial.h>
5#include <visp3/core/vpXmlParserCamera.h>
6#include <visp3/detection/vpDetectorAprilTag.h>
7#include <visp3/gui/vpDisplayFactory.h>
8#include <visp3/io/vpImageIo.h>
9#include <visp3/robot/vpUnicycle.h>
10#include <visp3/sensor/vpV4l2Grabber.h>
11#include <visp3/visual_features/vpFeatureBuilder.h>
12#include <visp3/visual_features/vpFeatureDepth.h>
13#include <visp3/visual_features/vpFeaturePoint.h>
14#include <visp3/vs/vpServo.h>
15
/*!
 * Print the program synopsis and the description of every command line
 * option on std::cout.
 *
 * \param argv : Command line arguments; argv[0] is used as the program name.
 * \param error : When non-zero, index in argv of the unsupported parameter
 * to report after the help text; when zero, no error message is printed.
 */
void usage(const char **argv, int error)
{
  std::cout << "Synopsis" << std::endl
    << "  " << argv[0]
    << " [--camera-device <id>]"
    << " [--tag-size <size>]"
    << " [--tag-family <family>]"
    << " [--tag-decision-margin-threshold <threshold>]"
    << " [--tag-hamming-distance-threshold <threshold>]"
    << " [--tag-quad-decimate <factor>]"
    << " [--tag-n-threads <number>]"
    << " [--tag-pose-method <method>]"
#if defined(VISP_HAVE_PUGIXML)
    << " [--intrinsic <xmlfile>]"
    << " [--camera-name <name>]"
#endif
#if defined(VISP_HAVE_DISPLAY)
    << " [--display-tag]"
    << " [--display-on]"
    << " [--save-image]" // Fixed: synopsis previously printed a stray '>' as "[--save-image>]"
#endif
    << " [--serial-off]"
    << " [--without-pose-computation]" // Fixed: option was described below but missing from the synopsis
    << " [--help, -h]" << std::endl
    << std::endl;
  std::cout << "Description" << std::endl
    << "  2D-half visual servoing using an Apriltag." << std::endl
    << std::endl
    << "  --camera-device <id>" << std::endl
    << "    Camera device id." << std::endl
    << "    Default: 0" << std::endl
    << std::endl
    << "  --tag-size <size>" << std::endl
    << "    Apriltag size in [m]." << std::endl
    << "    Default: 0.03" << std::endl
    << std::endl
    << "  --tag-family <family>" << std::endl
    << "    Apriltag family. Supported values are:" << std::endl
    << "       0: TAG_36h11" << std::endl
    << "       1: TAG_36h10 (DEPRECATED)" << std::endl
    << "       2: TAG_36ARTOOLKIT (DEPRECATED)" << std::endl
    << "       3: TAG_25h9" << std::endl
    << "       4: TAG_25h7 (DEPRECATED)" << std::endl
    << "       5: TAG_16h5" << std::endl
    << "       6: TAG_CIRCLE21h7" << std::endl
    << "       7: TAG_CIRCLE49h12" << std::endl
    << "       8: TAG_CUSTOM48h12" << std::endl
    << "       9: TAG_STANDARD41h12" << std::endl
    << "      10: TAG_STANDARD52h13" << std::endl
    << "      11: TAG_ARUCO_4x4_50" << std::endl
    << "      12: TAG_ARUCO_4x4_100" << std::endl
    << "      13: TAG_ARUCO_4x4_250" << std::endl
    << "      14: TAG_ARUCO_4x4_1000" << std::endl
    << "      15: TAG_ARUCO_5x5_50" << std::endl
    << "      16: TAG_ARUCO_5x5_100" << std::endl
    << "      17: TAG_ARUCO_5x5_250" << std::endl
    << "      18: TAG_ARUCO_5x5_1000" << std::endl
    << "      19: TAG_ARUCO_6x6_50" << std::endl
    << "      20: TAG_ARUCO_6x6_100" << std::endl
    << "      21: TAG_ARUCO_6x6_250" << std::endl
    << "      22: TAG_ARUCO_6x6_1000" << std::endl
    << "      23: TAG_ARUCO_7x7_50" << std::endl
    << "      24: TAG_ARUCO_7x7_100" << std::endl
    << "      25: TAG_ARUCO_7x7_250" << std::endl
    << "      26: TAG_ARUCO_7x7_1000" << std::endl
    << "      27: TAG_ARUCO_MIP_36h12" << std::endl
    << "    Default: 0 (36h11)" << std::endl
    << std::endl
    << "  --tag-decision-margin-threshold <threshold>" << std::endl
    << "    Threshold used to discard low-confident detections. A typical value is " << std::endl
    << "    around 100. The higher this value, the more false positives will be filtered" << std::endl
    << "    out. When this value is set to -1, false positives are not filtered out." << std::endl
    << "    Default: 50" << std::endl
    << std::endl
    << "  --tag-hamming-distance-threshold <threshold>" << std::endl
    << "    Threshold used to discard low-confident detections with corrected bits." << std::endl
    << "    A typical value is between 0 and 3. The lower this value, the more false" << std::endl
    << "    positives will be filtered out." << std::endl
    << "    Default: 0" << std::endl
    << std::endl
    << "  --tag-quad-decimate <factor>" << std::endl
    << "    Decimation factor used to detect a tag. " << std::endl
    << "    Default: 1" << std::endl
    << std::endl
    << "  --tag-n-threads <number>" << std::endl
    << "    Number of threads used to detect a tag." << std::endl
    << "    Default: 1" << std::endl
    << std::endl
#if defined(VISP_HAVE_PUGIXML)
    << "  --intrinsic <xmlfile>" << std::endl
    << "    Camera intrinsic parameters file in xml format." << std::endl
    << "    Default: empty" << std::endl
    << std::endl
    << "  --camera-name <name>" << std::endl
    << "    Camera name in the intrinsic parameters file in xml format." << std::endl
    << "    Default: empty" << std::endl
    << std::endl
#endif
#if defined(VISP_HAVE_DISPLAY)
    << "  --display-tag" << std::endl
    << "    Flag used to enable displaying the edges of a tag." << std::endl
    << "    Default: disabled" << std::endl
    << std::endl
    << "  --display-on" << std::endl
    << "    Flag used to turn display on." << std::endl
    << "    Default: disabled" << std::endl
    << std::endl
    << "  --save-image" << std::endl
    << "    Flag used to save images with overlay drawings." << std::endl
    << "    Default: disabled" << std::endl
    << std::endl
#endif
    << "  --serial-off" << std::endl
    << "    Flag used to disable serial link." << std::endl
    << "    Default: enabled" << std::endl
    << std::endl
    << "  --without-pose-computation" << std::endl
    << "    Flag used to disable tag pose estimation." << std::endl
    << "    Default: enabled" << std::endl
    << std::endl
    << "  --help, -h" << std::endl
    << "    Print this helper message." << std::endl
    << std::endl;

  if (error) {
    std::cout << "Error" << std::endl
      << "  "
      << "Unsupported parameter " << argv[error] << std::endl;
  }
}
145
/*!
 * 2D-half visual servoing of a mBot Ranger mobile robot from an AprilTag:
 * grabs images over V4L2, detects one tag, builds an x-coordinate point
 * feature plus a log(Z/Z*) depth feature, and converts the computed camera
 * velocity into left/right motor RPM sent over a serial link.
 *
 * NOTE(review): this view of the file appears to have lost several lines
 * (see the individual NOTE(review) markers below); declarations such as the
 * grabber, image, camera parameters and servo task are referenced but not
 * visible here. Compare against the upstream tutorial before editing.
 */
int main(int argc, const char **argv)
{
#if defined(VISP_HAVE_APRILTAG) && defined(VISP_HAVE_V4L2)
#ifdef ENABLE_VISP_NAMESPACE
  using namespace VISP_NAMESPACE_NAME;
#endif

  // Default option values (overridable from the command line below).
  int device = 0;
  // NOTE(review): two lines appear to be missing here in this view —
  // presumably the declarations of opt_tag_family and
  // opt_tag_pose_estimation_method, both used later. TODO confirm upstream.
  double opt_tag_size = 0.065;
  float opt_tag_quad_decimate = 4.0;
  float opt_tag_decision_margin_threshold = 50;
  float opt_tag_hamming_distance_threshold = 2;
  int opt_tag_nThreads = 2;
  std::string intrinsic_file = "";
  std::string camera_name = "";
  bool display_tag = false;
  bool display_on = false;
  bool serial_off = false;
  bool use_pose = true;
  bool save_image = false; // Only possible if display_on = true

  // Command line parsing: options taking a value check that the value is
  // present (i + 1 < argc) before consuming it with ++i.
  for (int i = 1; i < argc; i++) {
    if (std::string(argv[i]) == "--camera-device" && i + 1 < argc) {
      device = std::atoi(argv[++i]);
    }
    else if (std::string(argv[i]) == "--tag-size" && i + 1 < argc) {
      opt_tag_size = std::atof(argv[++i]);
    }
    else if (std::string(argv[i]) == "--tag-family" && i + 1 < argc) {
      opt_tag_family = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[++i]);
    }
    else if (std::string(argv[i]) == "--tag-decision-margin-threshold" && i + 1 < argc) {
      opt_tag_decision_margin_threshold = static_cast<float>(atof(argv[++i]));
    }
    else if (std::string(argv[i]) == "--tag-hamming-distance-threshold" && i + 1 < argc) {
      // NOTE(review): parsed with atoi although the variable is a float;
      // harmless for the documented 0..3 integer range, but inconsistent
      // with the atof-based options above.
      opt_tag_hamming_distance_threshold = atoi(argv[++i]);
    }
    else if (std::string(argv[i]) == "--tag-quad-decimate" && i + 1 < argc) {
      opt_tag_quad_decimate = static_cast<float>(atof(argv[++i]));
    }
    else if (std::string(argv[i]) == "--tag-n-threads" && i + 1 < argc) {
      opt_tag_nThreads = std::atoi(argv[++i]);
    }
#if defined(VISP_HAVE_PUGIXML)
    else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
      intrinsic_file = std::string(argv[++i]);
    }
    else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) {
      camera_name = std::string(argv[++i]);
    }
#endif
#if defined(VISP_HAVE_DISPLAY)
    else if (std::string(argv[i]) == "--display-tag") {
      display_tag = true;
    }
    else if (std::string(argv[i]) == "--display-on") {
      display_on = true;
    }
    else if (std::string(argv[i]) == "--save-image") {
      save_image = true;
      // NOTE(review): the #endif below sits between this statement and the
      // closing brace of the --save-image branch; if VISP_HAVE_DISPLAY is
      // undefined the stray '}' would break the if/else chain. TODO confirm
      // against the upstream tutorial.
#endif
    }
    else if (std::string(argv[i]) == "--serial-off") {
      serial_off = true;
    }
    else if (std::string(argv[i]) == "--without-pose-computation") {
      use_pose = false;
    }
    else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
      usage(argv, 0);
      return EXIT_SUCCESS;
    }
    else {
      // Unknown parameter: print help with the offending index and abort.
      usage(argv, i);
      return EXIT_FAILURE;
    }
  }

  // Me Auriga led ring
  // if serial com ok: led 1 green
  // if exception: led 1 red
  // if tag detected: led 2 green, else led 2 red
  // if motor left: led 3 blue
  // if motor right: led 4 blue

  // Serial link to the robot base; nullptr when --serial-off is given.
  vpSerial *serial = nullptr;
  if (!serial_off) {
    serial = new vpSerial("/dev/ttyAMA0", 115200);

    serial->write("LED_RING=0,0,0,0\n"); // Switch off all led
    serial->write("LED_RING=1,0,10,0\n"); // Switch on led 1 to green: serial ok
  }

  try {
    // NOTE(review): two lines appear missing here — presumably the
    // declarations of the image I and the V4L2 grabber g used just below.
    std::ostringstream device_name;
    device_name << "/dev/video" << device;
    g.setDevice(device_name.str());
    g.setScale(1);
    g.acquire(I); // Grab a first frame so I has its final dimensions.

    vpDisplay *d = nullptr;
#ifdef VISP_HAVE_DISPLAY
    if (display_on) {
      // NOTE(review): display allocation (e.g. via vpDisplayFactory) appears
      // to be missing from this view.
    }
#endif

    // Default intrinsics (focal lengths in pixels, principal point at the
    // image center); overridden by the XML file when provided.
    // NOTE(review): the declaration of cam appears missing from this view.
    cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, I.getWidth() / 2., I.getHeight() / 2.);
    // NOTE(review): the declaration of the XML parser appears missing here.
    if (!intrinsic_file.empty() && !camera_name.empty())
      parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);

    std::cout << cam << std::endl;
    std::cout << "Tag detector settings" << std::endl;
    std::cout << " Tag size [m] : " << opt_tag_size << std::endl;
    std::cout << " Tag family : " << opt_tag_family << std::endl;
    std::cout << " Quad decimate : " << opt_tag_quad_decimate << std::endl;
    std::cout << " Decision margin threshold : " << opt_tag_decision_margin_threshold << std::endl;
    std::cout << " Hamming distance threshold: " << opt_tag_hamming_distance_threshold << std::endl;
    std::cout << " Num threads : " << opt_tag_nThreads << std::endl;
    std::cout << " Pose estimation : " << opt_tag_pose_estimation_method << std::endl;

    vpDetectorAprilTag detector(opt_tag_family);

    detector.setAprilTagQuadDecimate(opt_tag_quad_decimate);
    if (use_pose) {
      detector.setAprilTagPoseEstimationMethod(opt_tag_pose_estimation_method);
    }
    detector.setAprilTagNbThreads(opt_tag_nThreads);
    detector.setDisplayTag(display_tag);
    detector.setAprilTagDecisionMarginThreshold(opt_tag_decision_margin_threshold);
    detector.setAprilTagHammingDistanceThreshold(opt_tag_hamming_distance_threshold);

    // Adaptive control gain: a lower gain at zero is used with display on,
    // since drawing slows the loop down.
    vpAdaptiveGain lambda;
    if (display_on)
      lambda.initStandard(2.5, 0.4, 30); // lambda(0)=2.5, lambda(oo)=0.4 and lambda'(0)=30
    else
      lambda.initStandard(4, 0.4, 30); // lambda(0)=4, lambda(oo)=0.4 and lambda'(0)=30

    vpUnicycle robot;
    // NOTE(review): the declaration of the servo task appears missing here.
    task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE);
    task.setLambda(lambda);
    // Camera-to-end-effector rotation: camera x = -robot y, camera y =
    // -robot z, camera z = robot x.
    // NOTE(review): the declaration of cRe appears missing here.
    cRe[0][0] = 0;
    cRe[0][1] = -1;
    cRe[0][2] = 0;
    cRe[1][0] = 0;
    cRe[1][1] = 0;
    cRe[1][2] = -1;
    cRe[2][0] = 1;
    cRe[2][1] = 0;
    cRe[2][2] = 0;

    // NOTE(review): the construction of cMe from cRe appears missing here.
    vpVelocityTwistMatrix cVe(cMe);
    task.set_cVe(cVe);

    // Robot Jacobian: 2 DoF unicycle (forward velocity, yaw rate).
    vpMatrix eJe(6, 2, 0);
    eJe[0][0] = eJe[5][1] = 1.0;

    std::cout << "eJe: \n" << eJe << std::endl;

    // Current and desired visual feature associated to the x coordinate of the point
    vpFeaturePoint s_x, s_xd;
    vpImagePoint cog;
    double Z, Z_d;
    Z = Z_d = 0.4; // Desired tag distance [m].

    // Create the current x visual feature
    vpFeatureBuilder::create(s_x, cam, cog);

    // Create the desired x* visual feature
    s_xd.buildFrom(0, 0, Z_d);

    // Add the point feature
    task.addFeature(s_x, s_xd, vpFeaturePoint::selectX());

    // Create the log(Z/Z*) visual feature
    vpFeatureDepth s_Z, s_Z_d;

    std::cout << "Z " << Z << std::endl;
    s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z, 0); // log(Z/Z*) = 0 that's why the last parameter is 0
    s_Z_d.buildFrom(0, 0, Z_d, 0); // The value of s* is 0 with Z=1 meter

    // Add the feature
    task.addFeature(s_Z, s_Z_d);

    // Detection times [ms], reported as mean/median/stddev on exit.
    std::vector<double> time_vec;
    for (;;) {
      g.acquire(I);
      // NOTE(review): a display call (presumably vpDisplay::display(I))
      // appears missing here.

      double t = vpTime::measureTimeMs();
      std::vector<vpHomogeneousMatrix> cMo_vec;
      if (use_pose)
        detector.detect(I, opt_tag_size, cam, cMo_vec);
      else
        detector.detect(I);
      // NOTE(review): the line turning t into an elapsed time (presumably
      // t = vpTime::measureTimeMs() - t) appears missing here.
      time_vec.push_back(t);

      {
        std::stringstream ss;
        ss << "Detection time: " << t << " ms";
        vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
      }

      // Servo only when exactly one tag is detected.
      if (detector.getNbObjects() == 1) {
        // Display visual features
        vpHomogeneousMatrix cdMo(0, 0, Z_d, 0, 0, 0);
        vpDisplay::displayFrame(I, cdMo, cam, opt_tag_size / 3, vpColor::red, 3);
        vpDisplay::displayCross(I, detector.getCog(0), 15, vpColor::green,
                                3); // Current polygon used to compute a moment
        vpDisplay::displayLine(I, 0, cam.get_u0(), I.getHeight() - 1, cam.get_u0(), vpColor::red,
                               3); // Vertical line as desired x position
        if (use_pose) {
          // Display visual features
          vpDisplay::displayFrame(I, cMo_vec[0], cam, opt_tag_size / 2, vpColor::none, 3);
        }

        if (!serial_off) {
          serial->write("LED_RING=2,0,10,0\n"); // Switch on led 2 to green: tag detected
        }

        if (use_pose) {
          // Depth comes straight from the translation part of the tag pose.
          Z = cMo_vec[0][2][3];
        }
        else {
          // Without pose: estimate depth from the apparent tag area.
          vpPolygon polygon(detector.getPolygon(0));
          double surface = polygon.getArea();
          std::cout << "Surface: " << surface << std::endl;

          // Compute the distance from target surface and 3D size
          Z = opt_tag_size * cam.get_px() / sqrt(surface);
        }

        // Update the current x feature from the tag center of gravity.
        vpFeatureBuilder::create(s_x, cam, detector.getCog(0));
        s_x.set_Z(Z);

        // Update log(Z/Z*) feature
        s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z, log(Z / Z_d));

        std::cout << "cog: " << detector.getCog(0) << " Z: " << Z << std::endl;

        task.set_cVe(cVe);
        task.set_eJe(eJe);

        // Compute the control law. Velocities are computed in the mobile robot reference frame
        vpColVector v = task.computeControlLaw();

        std::cout << "Send velocity to the mbot: " << v[0] << " m/s " << vpMath::deg(v[1]) << " deg/s" << std::endl;

        task.print();
        // Differential drive kinematics: wheel radius and half wheelbase [m].
        double radius = 0.0325;
        double L = 0.0725;
        double motor_left = (-v[0] - L * v[1]) / radius;
        double motor_right = (v[0] - L * v[1]) / radius;
        std::cout << "motor left vel: " << motor_left << " motor right vel: " << motor_right << std::endl;
        if (!serial_off) {
          // serial->write("LED_RING=3,0,0,10\n"); // Switch on led 3 to blue: motor left servoed
          // serial->write("LED_RING=4,0,0,10\n"); // Switch on led 4 to blue: motor right servoed
        }
        std::stringstream ss;
        // Convert wheel angular velocity [rad/s] to RPM.
        double rpm_left = motor_left * 30. / M_PI;
        double rpm_right = motor_right * 30. / M_PI;
        ss << "MOTOR_RPM=" << vpMath::round(rpm_left) << "," << vpMath::round(rpm_right) << "\n";
        std::cout << "Send: " << ss.str() << std::endl;
        if (!serial_off) {
          serial->write(ss.str());
        }
      }
      else {
        // stop the robot
        if (!serial_off) {
          serial->write("LED_RING=2,10,0,0\n"); // Switch on led 2 to red: tag not detected
          // serial->write("LED_RING=3,0,0,0\n"); // Switch on led 3 to blue: motor left not servoed
          // serial->write("LED_RING=4,0,0,0\n"); // Switch on led 4 to blue: motor right not servoed
          serial->write("MOTOR_RPM=0,-0\n"); // Stop the robot
        }
      }

      vpDisplay::displayText(I, 20, 20, "Click to quit.", vpColor::red);
      // NOTE(review): a flush call (presumably vpDisplay::flush(I)) appears
      // missing here.
      if (display_on && save_image) {
        // NOTE(review): the conversion of I into a color image O (presumably
        // vpDisplay::getImage) appears missing here.
        vpImageIo::write(O, "image.png");
      }
      if (vpDisplay::getClick(I, false))
        break;
    }

    if (!serial_off) {
      serial->write("LED_RING=0,0,0,0\n"); // Switch off all led
    }

    std::cout << "Benchmark computation time" << std::endl;
    std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms"
      << " ; " << vpMath::getMedian(time_vec) << " ms"
      << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl;

    if (display_on)
      delete d;
    if (!serial_off) {
      delete serial;
    }
  }
  catch (const vpException &e) {
    // On any ViSP error, report it and turn led 1 red when serial is active.
    std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
    if (!serial_off) {
      serial->write("LED_RING=1,10,0,0\n"); // Switch on led 1 to red
    }
  }

  return EXIT_SUCCESS;
#else
  // Fallback build: report which third-party support is missing.
  (void)argc;
  (void)argv;
#ifndef VISP_HAVE_APRILTAG
  std::cout << "ViSP is not build with Apriltag support" << std::endl;
#endif
#ifndef VISP_HAVE_V4L2
  std::cout << "ViSP is not build with v4l2 support" << std::endl;
#endif
  std::cout << "Install missing 3rd parties, configure and build ViSP to run this tutorial" << std::endl;
  return EXIT_SUCCESS;
#endif
}
Adaptive gain computation.
void initStandard(double gain_at_zero, double gain_at_infinity, double slope_at_zero)
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
static const vpColor red
Definition vpColor.h:198
static const vpColor none
Definition vpColor.h:210
static const vpColor green
Definition vpColor.h:201
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void getImage(const vpImage< unsigned char > &Is, vpImage< vpRGBa > &Id)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Class that defines a 3D point visual feature which is composed by one parameters that is that defin...
vpFeatureDepth & buildFrom(const double &x, const double &y, const double &Z, const double &LogZoverZstar)
Class that defines a 2D point visual feature which is composed by two parameters that are the cartes...
vpFeaturePoint & buildFrom(const double &x, const double &y, const double &Z)
static unsigned int selectX()
double get_y() const
double get_x() const
void set_Z(double Z)
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void write(const vpImage< unsigned char > &I, const std::string &filename, int backend=IO_DEFAULT_BACKEND)
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
Definition vpImage.h:131
static double getMedian(const std::vector< double > &v)
Definition vpMath.cpp:343
static double getStdev(const std::vector< double > &v, bool useBesselCorrection=false)
Definition vpMath.cpp:374
static int round(double x)
Definition vpMath.h:413
static double getMean(const std::vector< double > &v)
Definition vpMath.cpp:323
static double deg(double rad)
Definition vpMath.h:119
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:175
Defines a generic 2D polygon.
Definition vpPolygon.h:103
Implementation of a rotation matrix and operations on such kind of matrices.
void write(const std::string &s)
Definition vpSerial.cpp:332
@ EYEINHAND_L_cVe_eJe
Definition vpServo.h:183
@ PSEUDO_INVERSE
Definition vpServo.h:250
@ CURRENT
Definition vpServo.h:217
Class that consider the case of a translation vector.
Generic functions for unicycle mobile robots.
Definition vpUnicycle.h:52
Class that is a wrapper over the Video4Linux2 (V4L2) driver.
void setScale(unsigned scale=vpV4l2Grabber::DEFAULT_SCALE)
void setDevice(const std::string &devname)
void acquire(vpImage< unsigned char > &I)
XML parser to load and save intrinsic camera parameters.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()