Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
tutorial-mb-generic-tracker-live.cpp
1
2#include <iostream>
3
4#include <visp3/core/vpConfig.h>
5
7// #undef VISP_HAVE_V4L2
8// #undef VISP_HAVE_DC1394
9// #undef VISP_HAVE_CMU1394
10// #undef VISP_HAVE_FLYCAPTURE
11// #undef VISP_HAVE_REALSENSE2
12// #undef HAVE_OPENCV_HIGHGUI
13// #undef HAVE_OPENCV_VIDEOIO
15
16#if (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \
17 defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2) || defined(VISP_HAVE_OPENCV) && \
18 (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
19 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)))) && \
20 defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_OPENCV) && \
21 (((VISP_HAVE_OPENCV_VERSION < 0x050000) && defined(HAVE_OPENCV_CALIB3D) && defined(HAVE_OPENCV_FEATURES2D)) || \
22 ((VISP_HAVE_OPENCV_VERSION >= 0x050000) && defined(HAVE_OPENCV_3D) && defined(HAVE_OPENCV_FEATURES)))
23
24#ifdef VISP_HAVE_MODULE_SENSOR
25#include <visp3/sensor/vp1394CMUGrabber.h>
26#include <visp3/sensor/vp1394TwoGrabber.h>
27#include <visp3/sensor/vpFlyCaptureGrabber.h>
28#include <visp3/sensor/vpRealSense2.h>
29#include <visp3/sensor/vpV4l2Grabber.h>
30#endif
31#include <visp3/core/vpIoTools.h>
32#include <visp3/core/vpXmlParserCamera.h>
33#include <visp3/gui/vpDisplayFactory.h>
34#include <visp3/io/vpImageIo.h>
35#include <visp3/vision/vpKeyPoint.h>
37#include <visp3/mbt/vpMbGenericTracker.h>
39
40#if (VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)
41#include <opencv2/highgui/highgui.hpp> // for cv::VideoCapture
42#elif (VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)
43#include <opencv2/videoio/videoio.hpp> // for cv::VideoCapture
44#endif
45
// NOTE(review): this listing is a doc-generator extraction of the ViSP tutorial
// "tutorial-mb-generic-tracker-live.cpp". The number fused onto the front of each
// line is the original file's line number; gaps in that numbering (e.g. 165->169,
// 184->186, 261->263) mark lines that the extractor's snippet markers stripped —
// notably the declarations of the camera parameters (`cam`), the XML parser
// (`parser`), the frame grabber (`g`), the model-based tracker (`tracker`), the
// pose (`cMo`), the mouse button (`button`), and the vpDisplay::display/flush
// calls. Comments below document only what the visible code demonstrates.
//
// Purpose: live model-based tracking demo. Grabs images from whichever camera
// SDK ViSP was built with, tracks a CAD model (edge / KLT / hybrid), and can
// optionally learn keypoints for later automatic (re-)initialization.
46int main(int argc, char **argv)
47{
48#ifdef ENABLE_VISP_NAMESPACE
49 using namespace VISP_NAMESPACE_NAME;
50#endif
51
// Display is heap-allocated pre-C++11 (deleted at the end of main) and a
// shared_ptr from C++11 on — same lifetime either way.
52#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
53 std::shared_ptr<vpDisplay> display;
54#else
55 vpDisplay *display = nullptr;
56#endif
57
58 try {
// Default command-line options; each may be overridden by the switches
// parsed in the loop below.
59 std::string opt_modelname = "model/teabox/teabox.cao";
60 int opt_tracker = 2;
61 int opt_device = 0; // For OpenCV and V4l2 grabber to set the camera device
62 double opt_proj_error_threshold = 30.;
63 bool opt_use_ogre = false;
64 bool opt_use_scanline = false;
65 bool opt_display_projection_error = false;
66 bool opt_learn = false;
67 bool opt_auto_init = false;
68 std::string opt_learning_data = "learning/data-learned.bin";
69 std::string opt_intrinsic_file = "";
70 std::string opt_camera_name = "";
71
// Minimal hand-rolled argument parser: value-taking switches consume the
// next argv entry via ++i; unknown arguments are silently ignored.
72 for (int i = 1; i < argc; i++) {
73 if (std::string(argv[i]) == "--model" && i + 1 < argc) {
74 opt_modelname = std::string(argv[++i]);
75 }
76 else if (std::string(argv[i]) == "--tracker" && i + 1 < argc) {
77 opt_tracker = atoi(argv[++i]);
78 }
79 else if (std::string(argv[i]) == "--camera-device" && i + 1 < argc) {
80 opt_device = atoi(argv[++i]);
81 }
82 else if (std::string(argv[i]) == "--max_proj_error" && i + 1 < argc) {
83 opt_proj_error_threshold = atof(argv[++i]);
84 }
85 else if (std::string(argv[i]) == "--use_ogre") {
86 opt_use_ogre = true;
87 }
88 else if (std::string(argv[i]) == "--use_scanline") {
89 opt_use_scanline = true;
90 }
91 else if (std::string(argv[i]) == "--learn") {
92 opt_learn = true;
93 }
94 else if (std::string(argv[i]) == "--learning_data" && i + 1 < argc) {
95 opt_learning_data = argv[++i];
96 }
97 else if (std::string(argv[i]) == "--auto_init") {
98 opt_auto_init = true;
99 }
100 else if (std::string(argv[i]) == "--display_proj_error") {
101 opt_display_projection_error = true;
102 }
103 else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
104 opt_intrinsic_file = std::string(argv[++i]);
105 }
106 else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) {
107 opt_camera_name = std::string(argv[++i]);
108 }
109 else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
110 std::cout
111 << "\nUsage: " << argv[0] << " [--camera-device <camera device> (default: 0)]"
112 << " [--intrinsic <intrinsic file> (default: empty)]"
113 << " [--camera-name <camera name> (default: empty)]"
114 << " [--model <model name> (default: teabox)]"
// NOTE(review): "0=egde" below is a typo for "0=edge" in the user-visible
// help text; fixing it changes program output so it is only flagged here.
115 << " [--tracker <0=egde|1=keypoint|2=hybrid> (default: 2)]"
116 << " [--use_ogre] [--use_scanline]"
117 << " [--max_proj_error <allowed projection error> (default: 30)]"
118 << " [--learn]"
119 << " [--auto_init]"
120 << " [--learning_data <data-learned.bin> (default: learning/data-learned.bin)]"
121 << " [--display_proj_error]"
122 << " [--help] [-h]\n"
123 << std::endl;
124 return EXIT_SUCCESS;
125 }
126 }
// Derive "<dir>/<basename-without-extension>" from the model path; the
// tracker expects sibling files <objectname>.{init,cao} and optional .xml/.ppm.
127 std::string parentname = vpIoTools::getParent(opt_modelname);
128 std::string objectname = vpIoTools::getNameWE(opt_modelname);
129
130 if (!parentname.empty())
131 objectname = parentname + "/" + objectname;
132
133 std::cout << "Tracker requested config files: " << objectname << ".[init, cao]" << std::endl;
134 std::cout << "Tracker optional config files: " << objectname << ".[ppm]" << std::endl;
135
136 std::cout << "Tracked features: " << std::endl;
137 std::cout << " Use edges : " << (opt_tracker == 0 || opt_tracker == 2) << std::endl;
138 std::cout << " Use klt : " << (opt_tracker == 1 || opt_tracker == 2) << std::endl;
139 std::cout << "Tracker options: " << std::endl;
140 std::cout << " Use ogre : " << opt_use_ogre << std::endl;
141 std::cout << " Use scanline: " << opt_use_scanline << std::endl;
142 std::cout << " Proj. error : " << opt_proj_error_threshold << std::endl;
143 std::cout << " Display proj. error: " << opt_display_projection_error << std::endl;
144 std::cout << "Config files: " << std::endl;
145 std::cout << " Config file : "
146 << "\"" << objectname + ".xml"
147 << "\"" << std::endl;
148 std::cout << " Model file : "
149 << "\"" << objectname + ".cao"
150 << "\"" << std::endl;
151 std::cout << " Init file : "
152 << "\"" << objectname + ".init"
153 << "\"" << std::endl;
154 std::cout << "Learning options : " << std::endl;
155 std::cout << " Learn : " << opt_learn << std::endl;
156 std::cout << " Auto init : " << opt_auto_init << std::endl;
157 std::cout << " Learning data: " << opt_learning_data << std::endl;
158
160#if VISP_VERSION_INT > VP_VERSION_INT(3, 2, 0)
161 vpImage<vpRGBa> I; // Since ViSP 3.2.0 we support model-based tracking on color images
162#else
163 vpImage<unsigned char> I; // Tracking on gray level images
164#endif
166
// Extraction gap (165->169): the `vpCameraParameters cam;` and
// `vpXmlParserCamera parser;` declarations were stripped. The hard-coded
// intrinsics below act as a fallback when no --intrinsic file is given.
169 cam.initPersProjWithoutDistortion(839, 839, 325, 243);
171
172#if defined(VISP_HAVE_PUGIXML)
174 if (!opt_intrinsic_file.empty() && !opt_camera_name.empty()) {
175 parser.parse(cam, opt_intrinsic_file, opt_camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
176 }
177#endif
178
182
// Compile-time grabber selection. The grabber object `g` for the
// V4L2/DC1394/CMU1394/FlyCapture branches is declared on stripped lines
// (numbering jumps 184->186, 194->196, 197->199, 203->206).
184#if defined(VISP_HAVE_V4L2)
186 std::ostringstream device;
187 device << "/dev/video" << opt_device;
188 std::cout << "Use Video 4 Linux grabber on device " << device.str() << std::endl;
189 g.setDevice(device.str());
190 g.setScale(1);
191 g.open(I);
192#elif defined(VISP_HAVE_DC1394)
193 (void)opt_device; // To avoid non used warning
194 std::cout << "Use DC1394 grabber" << std::endl;
196 g.open(I);
197#elif defined(VISP_HAVE_CMU1394)
198 (void)opt_device; // To avoid non used warning
199 std::cout << "Use CMU1394 grabber" << std::endl;
201 g.open(I);
202#elif defined(VISP_HAVE_FLYCAPTURE)
203 (void)opt_device; // To avoid non used warning
204 std::cout << "Use FlyCapture grabber" << std::endl;
206 g.open(I);
207#elif defined(VISP_HAVE_REALSENSE2)
208 (void)opt_device; // To avoid non used warning
209 std::cout << "Use Realsense 2 grabber" << std::endl;
210 vpRealSense2 g;
211 rs2::config config;
// Only the color stream is needed; depth/IR are disabled to save bandwidth.
212 config.disable_stream(RS2_STREAM_DEPTH);
213 config.disable_stream(RS2_STREAM_INFRARED);
214 config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
215 g.open(config);
216 g.acquire(I);
217
// The line fetching intrinsics from the device (original line 219) was
// stripped; presumably it overwrote `cam` — confirm against the tutorial source.
218 std::cout << "Read camera parameters from Realsense device" << std::endl;
220#elif defined(VISP_HAVE_OPENCV) && \
221 (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
222 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)))
223 std::cout << "Use OpenCV grabber on device " << opt_device << std::endl;
224 cv::VideoCapture g(opt_device); // Open the default camera
225 if (!g.isOpened()) { // Check if we succeeded
226 std::cout << "Failed to open the camera" << std::endl;
227 return EXIT_FAILURE;
228 }
229 cv::Mat frame;
230 g >> frame; // get a new frame from camera
231 vpImageConvert::convert(frame, I);
232#endif
234
// Extraction gap (235->240): the factory calls assigning `display`
// (vpDisplayFactory::createDisplay / allocateDisplay per the tooltip index)
// were stripped from both preprocessor branches.
235#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
237#else
239#endif
240 display->init(I, 100, 100, "Model-based tracker");
241
// Show live frames until the user clicks, so the object can be placed in
// view before initialization. The vpDisplay::display/flush calls (original
// lines 252/254) were stripped.
242 while (true) {
243#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
244 g.acquire(I);
245#elif defined(VISP_HAVE_OPENCV) && \
246 (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
247 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)))
248 g >> frame;
249 vpImageConvert::convert(frame, I);
250#endif
251
253 vpDisplay::displayText(I, 20, 20, "Click when ready.", vpColor::red);
255
256 if (vpDisplay::getClick(I, false)) {
257 break;
258 }
259 }
260
// Tracker construction (original lines 261-270) is mostly stripped: only
// the if/else skeleton selecting edge (0) / KLT (1) / hybrid (2) remains;
// the `vpMbGenericTracker tracker(...)` statements themselves are missing.
263 if (opt_tracker == 0)
265#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
266 else if (opt_tracker == 1)
268 else
270#else
// Without the KLT module / OpenCV, tracker types 1 and 2 cannot run:
// explain why and exit gracefully.
271 else {
272#if !defined(VISP_HAVE_MODULE_KLT)
273 std::cout << "klt and hybrid model-based tracker are not available since visp_klt module is not available. "
274 "In CMakeGUI turn visp_klt module ON, configure and build ViSP again."
275 << std::endl;
276#else
277 std::cout << "Hybrid tracking is impossible since OpenCV is not enabled. "
278 << "Install OpenCV, configure and build ViSP again to run this tutorial." << std::endl;
279#endif
280 return EXIT_SUCCESS;
281 }
282#endif
284
// Prefer the per-object XML config when present; otherwise fall back to
// the hard-coded moving-edge / KLT settings below.
285 bool usexml = false;
287#if defined(VISP_HAVE_PUGIXML)
288 if (vpIoTools::checkFilename(objectname + ".xml")) {
289 tracker.loadConfigFile(objectname + ".xml");
290 usexml = true;
291 }
292#endif
294
295 if (!usexml) {
297 if (opt_tracker == 0 || opt_tracker == 2) {
// Hard-coded moving-edge defaults (a setLikelihoodThresholdType call on
// stripped line 303 presumably complements setThreshold — confirm).
299 vpMe me;
300 me.setMaskSize(5);
301 me.setMaskNumber(180);
302 me.setRange(8);
304 me.setThreshold(20);
305 me.setMu1(0.5);
306 me.setMu2(0.5);
307 me.setSampleStep(4);
308 tracker.setMovingEdge(me);
310 }
311
312#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
313 if (opt_tracker == 1 || opt_tracker == 2) {
// Hard-coded KLT defaults for the keypoint/hybrid trackers.
315 vpKltOpencv klt_settings;
316 klt_settings.setMaxFeatures(300);
317 klt_settings.setWindowSize(5);
318 klt_settings.setQuality(0.015);
319 klt_settings.setMinDistance(8);
320 klt_settings.setHarrisFreeParameter(0.01);
321 klt_settings.setBlockSize(3);
322 klt_settings.setPyramidLevels(3);
323 tracker.setKltOpencv(klt_settings);
324 tracker.setKltMaskBorder(5);
326 }
327#endif
328 }
329
330 tracker.setCameraParameters(cam);
332
// General tracker configuration: CAD model, feature display, visibility
// tests, and projection-error computation for the restart heuristic below.
334 tracker.loadModel(objectname + ".cao");
337 tracker.setDisplayFeatures(true);
340 tracker.setOgreVisibilityTest(opt_use_ogre);
341 tracker.setScanLineVisibilityTest(opt_use_scanline);
344 tracker.setProjectionErrorComputation(true);
345 tracker.setProjectionErrorDisplay(opt_display_projection_error);
347
// Keypoint detector/extractor/matcher selection: SIFT when the xfeatures2d
// (or OpenCV>=5 features) module is present, otherwise FAST+ORB with
// Hamming matching.
348#if defined(VISP_HAVE_OPENCV) && \
349 (((VISP_HAVE_OPENCV_VERSION < 0x050000) && defined(HAVE_OPENCV_XFEATURES2D)) || \
350 ((VISP_HAVE_OPENCV_VERSION >= 0x050000) && defined(HAVE_OPENCV_FEATURES)))
351 std::string detectorName = "SIFT";
352 std::string extractorName = "SIFT";
353 std::string matcherName = "BruteForce";
354#elif defined(VISP_HAVE_OPENCV) && \
355 (((VISP_HAVE_OPENCV_VERSION < 0x050000) && defined(HAVE_OPENCV_FEATURES2D)) || \
356 ((VISP_HAVE_OPENCV_VERSION >= 0x050000) && defined(HAVE_OPENCV_FEATURES)))
357 std::string detectorName = "FAST";
358 std::string extractorName = "ORB";
359 std::string matcherName = "BruteForce-Hamming";
360#endif
361 vpKeyPoint keypoint;
362 if (opt_learn || opt_auto_init) {
363 keypoint.setDetector(detectorName);
364 keypoint.setExtractor(extractorName);
365 keypoint.setMatcher(matcherName);
366#if defined(VISP_HAVE_OPENCV) && \
367 (((VISP_HAVE_OPENCV_VERSION < 0x050000) && defined(HAVE_OPENCV_FEATURES2D)) || \
368 ((VISP_HAVE_OPENCV_VERSION >= 0x050000) && defined(HAVE_OPENCV_FEATURES)))
// Restrict ORB to a single pyramid level; API differs before/after OpenCV 3.
369#if (VISP_HAVE_OPENCV_VERSION < 0x030000)
370 keypoint.setDetectorParameter("ORB", "nLevels", 1);
371#else
372 cv::Ptr<cv::ORB> orb_detector = keypoint.getDetector("ORB").dynamicCast<cv::ORB>();
373 if (orb_detector) {
374 orb_detector->setNLevels(1);
375 }
376#endif
377#endif
378 }
379
// Initialization: auto-init loads previously learned keypoints; otherwise
// the user clicks the model's reference points (<objectname>.init).
380 if (opt_auto_init) {
381 if (!vpIoTools::checkFilename(opt_learning_data)) {
382 std::cout << "Cannot enable auto detection. Learning file \"" << opt_learning_data << "\" doesn't exist"
383 << std::endl;
384 return EXIT_FAILURE;
385 }
386 keypoint.loadLearningData(opt_learning_data, true);
387 }
388 else {
389 tracker.initClick(I, objectname + ".init", true);
390 }
391
392 bool learn_position = false;
393 bool run_auto_init = false;
394 if (opt_auto_init) {
395 run_auto_init = true;
396 }
397
398 // To be able to display keypoints matching with test-detection-rs2
399 int learn_id = 1;
400 unsigned int learn_cpt = 0;
401 bool quit = false;
402 bool tracking_failed = false;
403
// Main acquisition/tracking loop; exits when `quit` is set by a right click.
404 while (!quit) {
405 double t_begin = vpTime::measureTimeMs();
406#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
407 g.acquire(I);
408#elif defined(VISP_HAVE_OPENCV) && \
409 (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
410 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)))
411 g >> frame;
412 vpImageConvert::convert(frame, I);
413#endif
415
416 // Run auto initialization from learned data
// `cMo` (camera-to-object pose) is declared on a stripped line; matchPoint
// estimates it from learned keypoints, and on failure the loop retries on
// the next frame (stripped line 424 presumably displayed/flushed first).
417 if (run_auto_init) {
418 tracking_failed = false;
419 if (keypoint.matchPoint(I, cam, cMo)) {
420 std::cout << "Auto init succeed" << std::endl;
421 tracker.initFromPose(I, cMo);
422 }
423 else {
425 continue;
426 }
427 }
428 else if (tracking_failed) {
429 // Manual init
430 tracking_failed = false;
431 tracker.initClick(I, objectname + ".init", true);
432 }
433
434 // Run the tracker
435 try {
436 if (run_auto_init) {
437 // Turn display features off just after auto init to not display wrong moving-edge if the tracker fails
438 tracker.setDisplayFeatures(false);
439
440 run_auto_init = false;
441 }
442 tracker.track(I);
443 }
444 catch (const vpException &e) {
445 std::cout << "Tracker exception: " << e.getStringMessage() << std::endl;
446 tracking_failed = true;
447 if (opt_auto_init) {
448 std::cout << "Tracker needs to restart (tracking exception)" << std::endl;
449 run_auto_init = true;
450 }
451 }
452
// Quality gate: when the re-projection error exceeds the user threshold,
// schedule a restart (automatic when --auto_init, else manual initClick).
453 if (!tracking_failed) {
454 double proj_error = 0;
455 if (tracker.getTrackerType() & vpMbGenericTracker::EDGE_TRACKER) {
456 // Check tracking errors
457 proj_error = tracker.getProjectionError();
458 }
459 else {
460 tracker.getPose(cMo);
461 tracker.getCameraParameters(cam);
462 proj_error = tracker.computeCurrentProjectionError(I, cMo, cam);
463 }
464 if (proj_error > opt_proj_error_threshold) {
465 std::cout << "Tracker needs to restart (projection error detected: " << proj_error << ")" << std::endl;
466 if (opt_auto_init) {
467 run_auto_init = true;
468 }
469 tracking_failed = true;
470 }
471 }
472
// Successful frame: overlay model edges, object frame, pose and feature
// counts on the display.
473 if (!tracking_failed) {
474 tracker.setDisplayFeatures(true);
476 tracker.getPose(cMo);
479 tracker.getCameraParameters(cam);
480 tracker.display(I, cMo, cam, vpColor::green, 2, false);
482 vpDisplay::displayFrame(I, cMo, cam, 0.025, vpColor::none, 3);
483
484 { // Display estimated pose in [m] and [deg]
485 vpPoseVector pose(cMo);
486 std::stringstream ss;
487 ss << "Translation: " << std::setprecision(5) << pose[0] << " " << pose[1] << " " << pose[2] << " [m]";
488 vpDisplay::displayText(I, 80, 20, ss.str(), vpColor::green);
489 ss.str(""); // erase ss
490 ss << "Rotation tu: " << std::setprecision(4) << vpMath::deg(pose[3]) << " " << vpMath::deg(pose[4]) << " "
491 << vpMath::deg(pose[5]) << " [deg]";
492 vpDisplay::displayText(I, 100, 20, ss.str(), vpColor::green);
493 }
494 {
495 std::stringstream ss;
496 ss << "Features: edges " << tracker.getNbFeaturesEdge() << ", klt " << tracker.getNbFeaturesKlt();
497 vpDisplay::displayText(I, 120, 20, ss.str(), vpColor::red);
498 }
499 }
500
// Learning step (triggered by a left click in --learn mode): detect
// keypoints, keep those lying on the tracked model's visible faces,
// back-project them to 3D, and add them to the reference database.
501 if (learn_position) {
502 learn_cpt++;
503 // Detect keypoints on the current image
504 std::vector<cv::KeyPoint> trainKeyPoints;
505 keypoint.detect(I, trainKeyPoints);
506
507 // Keep only keypoints on the cube
508 std::vector<vpPolygon> polygons;
509 std::vector<std::vector<vpPoint> > roisPt;
510 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > pair = tracker.getPolygonFaces();
511 polygons = pair.first;
512 roisPt = pair.second;
513
514 // Compute the 3D coordinates
515 std::vector<cv::Point3f> points3f;
516 vpKeyPoint::compute3DForPointsInPolygons(cMo, cam, trainKeyPoints, polygons, roisPt, points3f);
517
518 // Build the reference keypoints
519 keypoint.buildReference(I, trainKeyPoints, points3f, true, learn_id++);
520
521 // Display learned data
522 for (std::vector<cv::KeyPoint>::const_iterator it = trainKeyPoints.begin(); it != trainKeyPoints.end(); ++it) {
523 vpDisplay::displayCross(I, static_cast<int>(it->pt.y), static_cast<int>(it->pt.x), 10, vpColor::yellow, 3);
524 }
525 learn_position = false;
526 std::cout << "Data learned" << std::endl;
527 }
528
529 std::stringstream ss;
530 ss << "Loop time: " << vpTime::measureTimeMs() - t_begin << " ms";
531 vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
532 if (opt_learn)
533 vpDisplay::displayText(I, 35, 20, "Left click: learn Right click: quit", vpColor::red);
534 else if (opt_auto_init)
535 vpDisplay::displayText(I, 35, 20, "Left click: auto_init Right click: quit", vpColor::red);
536 else
537 vpDisplay::displayText(I, 35, 20, "Right click: quit", vpColor::red);
538
// `button` is declared on a stripped line (numbering jumps 538->540).
// Right click quits; left click triggers learning or a forced re-detection.
540 if (vpDisplay::getClick(I, button, false)) {
541 if (button == vpMouseButton::button3) {
542 quit = true;
543 }
544 else if (button == vpMouseButton::button1 && opt_learn) {
545 learn_position = true;
546 }
547 else if (button == vpMouseButton::button1 && opt_auto_init && !opt_learn) {
548 run_auto_init = true;
549 }
550 }
551
553 }
// Persist the learned keypoint database once at the end of the session.
554 if (opt_learn && learn_cpt) {
555 std::cout << "Save learning from " << learn_cpt << " images in file: " << opt_learning_data << std::endl;
556 keypoint.saveLearningData(opt_learning_data, true, true);
557 }
558 }
559 catch (const vpException &e) {
560 std::cout << "Catch a ViSP exception: " << e << std::endl;
561 }
562
// Pre-C++11 builds own `display` via raw pointer: release it here. (With
// C++11 the shared_ptr cleans up automatically.)
564#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
565 if (display != nullptr) {
566 delete display;
567 }
568#endif
570}
571
572#else
573
// Fallback main(), compiled when the #if guard at the top of the file fails
// (no suitable grabber / display / OpenCV feature modules): prints which
// third-party dependency is missing and exits successfully.
574int main()
575{
576#if defined(VISP_HAVE_OPENCV)
// OpenCV is present, so what is missing is a frame-grabbing backend.
577 std::cout << "Install a 3rd party dedicated to frame grabbing (dc1394, cmu1394, v4l2, OpenCV, FlyCapture, "
578 << "Realsense2), configure and build ViSP again to use this tutorial."
579 << std::endl;
580#else
581 std::cout << "Install OpenCV 3rd party, configure and build ViSP again to use this tutorial." << std::endl;
582#endif
583 return EXIT_SUCCESS;
584}
585
586#endif
Firewire cameras video capture based on CMU 1394 Digital Camera SDK.
void open(vpImage< unsigned char > &I)
Class for firewire ieee1394 video devices using libdc1394-2.x api.
void open(vpImage< unsigned char > &I)
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
static const vpColor red
Definition vpColor.h:198
static const vpColor none
Definition vpColor.h:210
static const vpColor yellow
Definition vpColor.h:206
static const vpColor green
Definition vpColor.h:201
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
void open(vpImage< unsigned char > &I)
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Definition of the vpImage class member functions.
Definition vpImage.h:131
static bool checkFilename(const std::string &filename)
static std::string getNameWE(const std::string &pathname)
static std::string getParent(const std::string &pathname)
Class that allows keypoints 2D features detection (and descriptors extraction) and matching thanks to...
Definition vpKeyPoint.h:274
unsigned int buildReference(const vpImage< unsigned char > &I) VP_OVERRIDE
void setExtractor(const vpFeatureDescriptorType &extractorType)
void loadLearningData(const std::string &filename, bool binaryMode=false, bool append=false)
void detect(const vpImage< unsigned char > &I, std::vector< cv::KeyPoint > &keyPoints, const vpRect &rectangle=vpRect())
static void compute3DForPointsInPolygons(const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, std::vector< cv::KeyPoint > &candidates, const std::vector< vpPolygon > &polygons, const std::vector< std::vector< vpPoint > > &roisPt, std::vector< cv::Point3f > &points, cv::Mat *descriptors=nullptr)
void setMatcher(const std::string &matcherName)
void saveLearningData(const std::string &filename, bool binaryMode=false, bool saveTrainingImages=true)
unsigned int matchPoint(const vpImage< unsigned char > &I) VP_OVERRIDE
void setDetector(const vpFeatureDetectorType &detectorType)
cv::Ptr< cv::FeatureDetector > getDetector(const vpFeatureDetectorType &type) const
Wrapper for the KLT (Kanade-Lucas-Tomasi) feature tracker implemented in OpenCV. Thus to enable this ...
Definition vpKltOpencv.h:83
void setBlockSize(int blockSize)
void setQuality(double qualityLevel)
void setHarrisFreeParameter(double harris_k)
void setMaxFeatures(int maxCount)
void setMinDistance(double minDistance)
void setWindowSize(int winSize)
void setPyramidLevels(int pyrMaxLevel)
static double deg(double rad)
Definition vpMath.h:119
Real-time 6D object pose tracking using its CAD model.
Definition vpMe.h:143
void setMu1(const double &mu_1)
Definition vpMe.h:408
void setRange(const unsigned int &range)
Definition vpMe.h:438
void setLikelihoodThresholdType(const vpLikelihoodThresholdType likelihood_threshold_type)
Definition vpMe.h:531
void setMaskNumber(const unsigned int &mask_number)
Definition vpMe.cpp:555
void setThreshold(const double &threshold)
Definition vpMe.h:489
void setSampleStep(const double &sample_step)
Definition vpMe.h:445
void setMaskSize(const unsigned int &mask_size)
Definition vpMe.cpp:563
void setMu2(const double &mu_2)
Definition vpMe.h:415
@ NORMALIZED_THRESHOLD
Definition vpMe.h:154
Implementation of a pose vector and operations on poses.
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
Class that is a wrapper over the Video4Linux2 (V4L2) driver.
void open(vpImage< unsigned char > &I)
void setScale(unsigned scale=vpV4l2Grabber::DEFAULT_SCALE)
void setDevice(const std::string &devname)
XML parser to load and save intrinsic camera parameters.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()