Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
tutorial-mb-generic-tracker-full.cpp
1
#include <cstdlib>
#include <iomanip>
#include <iostream>
#include <memory>
#include <sstream>
#include <string>
#include <vector>

#include <visp3/core/vpConfig.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/gui/vpDisplayFactory.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/io/vpVideoReader.h>
#include <visp3/io/vpVideoWriter.h>
#include <visp3/mbt/vpMbGenericTracker.h>
11
12#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_VIDEOIO) && defined(HAVE_OPENCV_HIGHGUI)
13#ifdef ENABLE_VISP_NAMESPACE
14using namespace VISP_NAMESPACE_NAME;
15#endif
16
17namespace
18{
19std::vector<double> poseToVec(const vpHomogeneousMatrix &cMo)
20{
21 vpThetaUVector tu = cMo.getThetaUVector();
22 vpTranslationVector t = cMo.getTranslationVector();
23 std::vector<double> vec { t[0], t[1], t[2], tu[0], tu[1], tu[2] };
24
25 return vec;
26}
27}
28#endif
29
/*!
 * Model-based tracking tutorial: track a 3D object (CAD model) in a video,
 * display the estimated pose, optionally plot it, save overlay images and
 * export the poses to a npz file.
 *
 * NOTE(review): this listing was recovered from a doxygen page where several
 * statements had been swallowed by tooltips. Declarations and calls marked
 * "reconstructed" below were restored from the visible API usage and should
 * be confirmed against the original tutorial-mb-generic-tracker-full.cpp.
 */
int main(int argc, char **argv)
{
#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_VIDEOIO) && defined(HAVE_OPENCV_HIGHGUI) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11) && defined(VISP_HAVE_DISPLAY)
  // Default option values; each one can be overridden on the command line.
  std::string opt_videoname = "model/teabox/teabox.mp4";
  std::string opt_modelname = "model/teabox/teabox.cao";
  int opt_tracker = 0;              // 0: moving-edges, 1: KLT keypoints, 2: hybrid
  int opt_video_first_frame = -1;   // -1: process the video from its first image
  int opt_downscale_img = 1;        // 1: no subsampling of the input images
  bool opt_verbose = false;
  bool opt_plot = true;
  bool opt_display_scale_auto = false;
  bool opt_step_by_step = false;
  vpColVector opt_dof_to_estimate(6, 1.); // Here we consider 6 dof estimation
  std::string opt_save;
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
  std::string opt_save_results;
#endif
  unsigned int thickness = 2; // Thickness of the model drawn in overlay

  std::shared_ptr<vpDisplay> display;
  std::shared_ptr<vpPlot> plot;
  std::shared_ptr<vpVideoWriter> writer;

  // Offset in pixels from the right image border used to right-align the text overlays
  unsigned int right_display_offset = 170;

  try {
    // ---------------- Command-line parsing ----------------
    for (int i = 1; i < argc; i++) {
      if (std::string(argv[i]) == "--video" && i + 1 < argc) {
        opt_videoname = std::string(argv[++i]);
      }
      else if (std::string(argv[i]) == "--video-first-frame" && i + 1 < argc) {
        opt_video_first_frame = std::atoi(argv[++i]);
      }
      else if (std::string(argv[i]) == "--model" && i + 1 < argc) {
        opt_modelname = std::string(argv[++i]);
      }
      else if (std::string(argv[i]) == "--tracker" && i + 1 < argc) {
        opt_tracker = std::atoi(argv[++i]);
      }
      else if (std::string(argv[i]) == "--downscale-img" && i + 1 < argc) {
        opt_downscale_img = std::atoi(argv[++i]);
      }
      else if (std::string(argv[i]) == "--save" && i + 1 < argc) {
        opt_save = std::string(argv[++i]);
      }
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
      else if (std::string(argv[i]) == "--save-results" && i + 1 < argc) {
        opt_save_results = std::string(argv[++i]);
      }
#endif
      else if (std::string(argv[i]) == "--plot") {
        opt_plot = true;
      }
      else if (std::string(argv[i]) == "--dof" && i + 6 < argc) {
        // Expect six 0/1 flags, one per dof: tx ty tz rx ry rz
        for (int j = 0; j < 6; j++) {
          int val = std::atoi(argv[++i]);
          if (val == 0 || val == 1) {
            opt_dof_to_estimate[j] = val;
          }
          else {
            std::cout << "Error: wrong value after --dof option. Authorized values are 0 or 1 for each 6 dof to estimate." << std::endl;
            return EXIT_FAILURE;
          }
        }
      }
      else if (std::string(argv[i]) == "--display-scale-auto") {
        opt_display_scale_auto = true;
      }
      else if (std::string(argv[i]) == "--step-by-step") {
        opt_step_by_step = true;
      }
      else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") {
        opt_verbose = true;
      }
      else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
        std::cout << "\nSYNOPSIS " << std::endl
          << argv[0]
          << " [--video <video name>]"
          << " [--video-first-frame <image index>]"
          << " [--model <model name>]"
          << " [--tracker <0=edge|1=keypoint|2=hybrid>]"
          << " [--downscale-img <scale factor>]"
          << " [--dof <0/1 0/1 0/1 0/1 0/1 0/1>]"
          << " [--save <e.g. results-%04d.png>]"
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
          << " [--save-results <e.g. tracking_poses.npz>]"
#endif
          << " [--display-scale-auto]"
          << " [--step-by-step]"
          << " [--plot]"
          << " [--verbose,-v]"
          << " [--help,-h]"
          << std::endl;
        std::cout << "\nOPTIONS " << std::endl
          << "  --video <video name>" << std::endl
          << "      Input video name." << std::endl
          << "      Default: model/teabox/teabox.mp4" << std::endl
          << std::endl
          << "  --video-first-frame <image index>" << std::endl
          << "      Index of the first image to process." << std::endl
          << "      Set to -1 to process the first image of the video." << std::endl
          << "      Default: -1" << std::endl
          << std::endl
          << "  --model <model name>" << std::endl
          << "      CAD model filename. Supported formats are .cao and .wrl." << std::endl
          << "      To use wrl format, ViSP need to be built with Coin3D third-party." << std::endl
          << "      Default: model/teabox/teabox.cao" << std::endl
          << std::endl
          << "  --tracker <0=edge|1=keypoint|2=hybrid>" << std::endl
          << "      Tracker type:" << std::endl
          << "      - when 0: use only moving-edges" << std::endl
          << "      - when 1: use only KLT keypoints" << std::endl
          << "      - when 2: use hybrid scheme, moving-edges and KLT keypoints." << std::endl
          << "      Default: 0" << std::endl
          << std::endl
          << "  --downscale-img <scale factor>" << std::endl
          << "      Downscale input image width and height by this factor." << std::endl
          << "      When set to 1, image not down scaled. When set to 2, image width" << std::endl
          << "      and height is divided by 2." << std::endl
          << "      Default: 1" << std::endl
          << std::endl
          << "  --dof <0/1 0/1 0/1 0/1 0/1 0/1>" << std::endl
          << "      6-dim vector of 0 and 1 to indicate which dof [tx ty tz rx ry rz]" << std::endl
          << "      has to be estimated." << std::endl
          << "      When set to 1 the dof is estimated. When rather set to 0 the dof" << std::endl
          << "      is not estimated. Its value is the one from the initialisation." << std::endl
          << "      Default: 1 1 1 1 1 1 (to estimate all 6 dof)" << std::endl
          << std::endl
          << "  --save <e.g. results-%04d.png>" << std::endl
          << "      Name of the saved image sequence that contains tracking results in overlay." << std::endl
          << "      When the name contains a folder like in the next example, the folder" << std::endl
          << "      is created if it doesn't exist." << std::endl
          << "      Example: \"result/image-%04d.png\"." << std::endl
          << std::endl
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
          << "  --save-results <e.g. tracking_results.npz>" << std::endl
          << "      Name of the npz file containing cMo data estimated from MBT." << std::endl
          << "      When the name contains a folder like in the next example, the folder" << std::endl
          << "      is created if it doesn't exist." << std::endl
          << "      Example: \"result/tracking_results.npz\"." << std::endl
          << std::endl
#endif
          << "  --display-scale-auto" << std::endl
          << "      Enable display window auto scaling to ensure that the image is fully" << std::endl
          << "      visible on the screen. Useful for large images." << std::endl
          << "      Note that this option doesn't affect the size of the processed images." << std::endl
          << std::endl
          << "  --step-by-step" << std::endl
          << "      Enable step by step mode waiting for a user click to process next image." << std::endl
          << std::endl
          << "  --plot" << std::endl
          << "      Open a window that plots the estimated pose evolution." << std::endl
          << std::endl
          << "  --verbose, -v" << std::endl
          << "      Enable verbose mode." << std::endl
          << std::endl
          << "  --help, -h" << std::endl
          << "      Display this helper message." << std::endl
          << std::endl;
        return EXIT_SUCCESS;
      }
      else {
        std::cout << "Error: unknown option: " << argv[i] << std::endl;
        std::cout << "To get the helper run: " << argv[0] << " --help" << std::endl;
        return EXIT_FAILURE;
      }
    }

    // The model name (without extension) is the common prefix of the .cao/.wrl,
    // .init, .xml and optional initialization image files.
    std::string parentname = vpIoTools::getParent(opt_modelname);
    std::string objectname = vpIoTools::getNameWE(opt_modelname);

    if (!parentname.empty())
      objectname = parentname + "/" + objectname;

    std::cout << " *********** Tracker config ************ " << std::endl;
    std::cout << "Video name : " << opt_videoname << std::endl;
    std::cout << "Tracker cad model file : " << objectname << ".[cao or wrl]" << std::endl;
    std::cout << "Tracker init file : " << objectname << ".init" << std::endl;
    std::cout << "Tracker optional init image: " << objectname << ".[png,ppm,jpg]" << std::endl;
    if (opt_downscale_img > 1) {
      std::cout << "Downscale image factor : " << opt_downscale_img << std::endl;
    }
    std::cout << "Dof to estimate : " << opt_dof_to_estimate.t() << std::endl;

    // Create output folder if needed
    if (!opt_save.empty()) {
      std::string parent = vpIoTools::getParent(opt_save);
      if (!parent.empty()) {
        std::cout << "Create output directory: " << parent << std::endl;
        vpIoTools::makeDirectory(parent); // reconstructed call (see tooltip API)
      }
    }
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
    if (!opt_save_results.empty()) {
      std::string parent = vpIoTools::getParent(opt_save_results);
      if (!parent.empty()) {
        std::cout << "Create output directory for the npz file: " << parent << std::endl;
        vpIoTools::makeDirectory(parent); // reconstructed call (see tooltip API)
      }
    }
#endif

    // Ivideo receives the full resolution frame; I is the (possibly subsampled)
    // image actually processed by the tracker and shown in the display.
    vpImage<vpRGBa> Ivideo;
    vpImage<vpRGBa> I; // reconstructed declaration — type deduced from Ivideo.subsample(..., I)

    vpVideoReader g; // reconstructed declaration
    g.setFileName(opt_videoname);
    if (opt_video_first_frame > 0) {
      g.setFirstFrameIndex(static_cast<unsigned int>(opt_video_first_frame));
    }
    if (opt_downscale_img > 1) {
      g.open(Ivideo);
      Ivideo.subsample(opt_downscale_img, opt_downscale_img, I);
    }
    else {
      g.open(I);
    }

    // Image in which the display overlay is grabbed before being saved to disk
    vpImage<vpRGBa> O; // reconstructed declaration; sized by vpDisplay::getImage() below
    if (!opt_save.empty()) {
      writer = std::make_shared<vpVideoWriter>();
      writer->setFileName(opt_save);
      writer->open(O);
    }

    // The down-scaling factor must be set before display initialization
    display = vpDisplayFactory::createDisplay(); // reconstructed call (see tooltip API)
    if (opt_display_scale_auto) {
      display->setDownScalingFactor(vpDisplay::SCALE_AUTO);
    }
    display->init(I, 100, 100, "Model-based tracker");

    if (opt_plot) {
      // Place the plotter window right of the display window
      plot = std::make_shared<vpPlot>(2, 700, 700, display->getWindowXPosition() + I.getWidth() / display->getDownScalingFactor() + 30,
                                      display->getWindowYPosition(), "Estimated pose");
      plot->initGraph(0, 3); // Translation
      plot->setTitle(0, "Translation [m]");
      plot->setColor(0, 0, vpColor::red);
      plot->setColor(0, 1, vpColor::green);
      plot->setColor(0, 2, vpColor::blue);
      plot->initGraph(1, 3); // Attitude
      plot->setTitle(1, "Attitude thetaU [deg]");
      plot->setColor(1, 0, vpColor::red);
      plot->setColor(1, 1, vpColor::green);
      plot->setColor(1, 2, vpColor::blue);
    }

    // Select the visual features used by the tracker
    vpMbGenericTracker tracker; // reconstructed declaration
    if (opt_tracker == 0)
      tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER); // reconstructed
#if defined(VISP_HAVE_MODULE_KLT) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    else if (opt_tracker == 1)
      tracker.setTrackerType(vpMbGenericTracker::KLT_TRACKER); // reconstructed
    else
      tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER); // reconstructed
#else
    else {
      std::cout << "klt and hybrid model-based tracker are not available since visp_klt module is not available. "
        "In CMakeGUI turn visp_klt module ON, configure and build ViSP again."
        << std::endl;
      return EXIT_FAILURE;
    }
#endif

    // Settings come either from an xml config file, or from hard-coded defaults below
    bool usexml = false;
#if defined(VISP_HAVE_PUGIXML)
    if (vpIoTools::checkFilename(objectname + ".xml")) {
      std::cout << "Tracker config file : " << objectname + ".xml" << std::endl;
      tracker.loadConfigFile(objectname + ".xml");
      usexml = true;
    }
#endif

    if (!usexml) {
      // Moving-edges settings, used by the edge and hybrid trackers
      if (opt_tracker == 0 || opt_tracker == 2) {
        vpMe me;
        me.setMaskSize(5);
        me.setMaskNumber(180);
        me.setInitRange(-1); // To use the default values defined for each type of primitive
        me.setRange(8);
        me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD); // reconstructed (see tooltip API)
        me.setThreshold(20);
        me.setMinThreshold(-1); // To deactivate automatic thresholding
        me.setThresholdMarginRatio(-1.); // To deactivate automatic thresholding
        me.setMu1(0.5);
        me.setMu2(0.5);
        me.setSampleStep(4);
        tracker.setMovingEdge(me);
      }

#if defined(VISP_HAVE_MODULE_KLT) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      // KLT settings, used by the keypoint and hybrid trackers
      if (opt_tracker == 1 || opt_tracker == 2) {
        vpKltOpencv klt_settings;
        klt_settings.setMaxFeatures(300);
        klt_settings.setWindowSize(5);
        klt_settings.setQuality(0.015);
        klt_settings.setMinDistance(8);
        klt_settings.setHarrisFreeParameter(0.01);
        klt_settings.setBlockSize(3);
        klt_settings.setPyramidLevels(3);
        tracker.setKltOpencv(klt_settings);
        tracker.setKltMaskBorder(5);
      }
#endif

      // Faces visibility angles
      tracker.setAngleAppear(vpMath::rad(70));
      tracker.setAngleDisappear(vpMath::rad(80));
      // Clipping: near/far planes plus field-of-view clipping
      tracker.setNearClippingDistance(0.1);
      tracker.setFarClippingDistance(100.0);
      tracker.setClipping(tracker.getClipping() | vpMbtPolygon::FOV_CLIPPING);

      // Default camera intrinsics, used only when no xml file was found
      vpCameraParameters cam; // reconstructed declaration
      cam.initPersProjWithoutDistortion(839.21470, 839.44555, 325.66776, 243.69727);
      tracker.setCameraParameters(cam);
    }

    // Visibility tests: scan-line instead of Ogre
    tracker.setOgreVisibilityTest(false);
    tracker.setOgreShowConfigDialog(false);
    tracker.setScanLineVisibilityTest(true);

    // Load the CAD model; .cao format has priority over .wrl
    if (vpIoTools::checkFilename(objectname + ".cao"))
      tracker.loadModel(objectname + ".cao");
    else if (vpIoTools::checkFilename(objectname + ".wrl"))
      tracker.loadModel(objectname + ".wrl");
    tracker.setDisplayFeatures(true);

    tracker.setGoodMovingEdgesRatioThreshold(0.2);

    if (opt_dof_to_estimate != 1.) {
      tracker.setEstimatedDoF(opt_dof_to_estimate);
    }

    // Retrieve the intrinsics actually used (xml or defaults)
    vpCameraParameters cam; // reconstructed declaration
    tracker.getCameraParameters(cam);
    std::cout << "Camera parameters: \n" << cam << std::endl;

    std::cout << "Initialize tracker on image size: " << I.getWidth() << " x " << I.getHeight() << std::endl;

    std::vector<double> vec_poses;
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
    // Save image size and intrinsics once at the head of the npz file
    if (!opt_save_results.empty()) {
      const unsigned int height = I.getHeight(), width = I.getWidth();
      visp::cnpy::npz_save(opt_save_results, "height", &height, { 1 }, "w");
      visp::cnpy::npz_save(opt_save_results, "width", &width, { 1 }, "a");

      const double cam_px = cam.get_px(), cam_py = cam.get_py(), cam_u0 = cam.get_u0(), cam_v0 = cam.get_v0();
      visp::cnpy::npz_save(opt_save_results, "cam_px", &cam_px, { 1 }, "a");
      visp::cnpy::npz_save(opt_save_results, "cam_py", &cam_py, { 1 }, "a");
      visp::cnpy::npz_save(opt_save_results, "cam_u0", &cam_u0, { 1 }, "a");
      visp::cnpy::npz_save(opt_save_results, "cam_v0", &cam_v0, { 1 }, "a");
    }
#endif

    // Interactive initialization: the user clicks the points listed in the .init file
    tracker.initClick(I, objectname + ".init", true);

#if defined(VISP_HAVE_NLOHMANN_JSON)
    std::string json_config_file = "mbt-config.json";
    std::cout << "Save tracker configuration in: " << json_config_file << std::endl;
    tracker.saveConfigFile(json_config_file);
#endif

    vpHomogeneousMatrix cMo; // reconstructed declaration: estimated object pose
    bool quit = false;
    while (!quit && !g.end()) {
      double time_start = vpTime::measureTimeMs();
      if (opt_downscale_img > 1) {
        g.acquire(Ivideo);
        Ivideo.subsample(opt_downscale_img, opt_downscale_img, I);
      }
      else {
        g.acquire(I);
      }
      std::stringstream ss;
      ss << "Process image " << g.getFrameIndex();
      if (opt_verbose) {
        std::cout << "-- " << ss.str() << std::endl;
      }
      vpDisplay::display(I); // reconstructed call (see tooltip API)
      tracker.track(I);
      tracker.getPose(cMo);
      tracker.display(I, cMo, cam, vpColor::red, thickness);
      vpDisplay::displayFrame(I, cMo, cam, 0.025, vpColor::none, thickness);
      vpDisplay::displayText(I, 20 * display->getDownScalingFactor(), 10 * display->getDownScalingFactor(), "Right click to exit...", vpColor::red);
      vpDisplay::displayText(I, 40 * display->getDownScalingFactor(), 10 * display->getDownScalingFactor(), "Middle click to change mode", vpColor::red);
      vpDisplay::displayText(I, 20 * display->getDownScalingFactor(), (I.getWidth() - right_display_offset) * display->getDownScalingFactor(), std::string("Mode: ") + (opt_step_by_step ? std::string("step-by-step") : std::string("continuous")), vpColor::red);
      vpDisplay::displayText(I, 40 * display->getDownScalingFactor(), (I.getWidth() - right_display_offset) * display->getDownScalingFactor(), ss.str(), vpColor::red);
      if (opt_step_by_step) {
        vpDisplay::displayText(I, 60 * display->getDownScalingFactor(), 10 * display->getDownScalingFactor(), "Left click to process next image", vpColor::red);
      }
      {
        // Report the number of features used by each enabled tracker
        std::stringstream ss;
        ss << "Features";
        if (tracker.getTrackerType() & vpMbGenericTracker::EDGE_TRACKER) {
          ss << " edge: " << tracker.getNbFeaturesEdge();
        }
#if defined(VISP_HAVE_MODULE_KLT) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
        if (tracker.getTrackerType() & vpMbGenericTracker::KLT_TRACKER) {
          ss << " klt: " << tracker.getNbFeaturesKlt();
        }
#endif
        vpDisplay::displayText(I, 60 * display->getDownScalingFactor(), (I.getWidth() - right_display_offset) * display->getDownScalingFactor(), ss.str(), vpColor::red);
        if (opt_verbose) {
          std::cout << ss.str() << std::endl;
          std::cout << "cMo:\n" << cMo << std::endl;
        }
      }
      {
        // Reprojection error of the model in the current image, in degrees
        double proj_error = tracker.computeCurrentProjectionError(I, cMo, cam);
        std::stringstream ss;
        ss << "Projection error: " << std::setprecision(2) << proj_error << " deg";
        vpDisplay::displayText(I, 80 * display->getDownScalingFactor(), (I.getWidth() - right_display_offset) * display->getDownScalingFactor(), ss.str(), vpColor::red);
        if (opt_verbose) {
          std::cout << ss.str() << std::endl;
        }
      }

      if (opt_plot) {
        vpTranslationVector c_t_o = cMo.getTranslationVector();
        vpThetaUVector c_tu_o = vpThetaUVector(cMo.getRotationMatrix());
        vpColVector c_tu_o_deg = vpMath::deg(c_tu_o);
        plot->plot(0, g.getFrameIndex(), c_t_o);
        plot->plot(1, g.getFrameIndex(), c_tu_o_deg);
      }

#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
      if (!opt_save_results.empty()) {
        std::vector<double> vec_pose = poseToVec(cMo);
        vec_poses.insert(vec_poses.end(), vec_pose.begin(), vec_pose.end());
      }
#endif

      // Blocking click in step-by-step mode, non-blocking otherwise
      vpMouseButton::vpMouseButtonType button; // reconstructed declaration
      if (vpDisplay::getClick(I, button, opt_step_by_step)) {
        if (button == vpMouseButton::button3) {
          quit = true;
        }
        else if (button == vpMouseButton::button2) {
          opt_step_by_step = !opt_step_by_step;
        }
      }

      {
        std::stringstream ss;
        ss << "Loop time: " << std::fixed << std::setprecision(0) << vpTime::measureTimeMs() - time_start << " ms";
        vpDisplay::displayText(I, (I.getHeight() - 20) * display->getDownScalingFactor(), (I.getWidth() - right_display_offset) * display->getDownScalingFactor(), ss.str(), vpColor::red);
      }
      vpDisplay::flush(I); // reconstructed call (see tooltip API)

      if (!opt_save.empty()) {
        vpDisplay::getImage(I, O); // reconstructed call (see tooltip API)
        writer->saveFrame(O);
      }
    }

#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
    // One 6-element row [tx ty tz tux tuy tuz] per processed image
    if (!opt_save_results.empty()) {
      visp::cnpy::npz_save(opt_save_results, "vec_poses", vec_poses.data(), { static_cast<size_t>(vec_poses.size()/6), 6 }, "a");
    }
#endif
  }
  catch (const vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
    return EXIT_FAILURE; // do not report success after a tracking failure
  }
#ifdef VISP_HAVE_OGRE
  catch (Ogre::Exception &e) {
    std::cout << "Catch an Ogre exception: " << e.getDescription() << std::endl;
    return EXIT_FAILURE;
  }
#endif
#else
  (void)argc;
  (void)argv;
  std::cout << "Install OpenCV and rebuild ViSP to use this example." << std::endl;
#endif
  return EXIT_SUCCESS;
}
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
static const vpColor red
Definition vpColor.h:198
static const vpColor none
Definition vpColor.h:210
static const vpColor blue
Definition vpColor.h:204
static const vpColor green
Definition vpColor.h:201
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void getImage(const vpImage< unsigned char > &Is, vpImage< vpRGBa > &Id)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
Implementation of an homogeneous matrix and operations on such kind of matrices.
Definition of the vpImage class member functions.
Definition vpImage.h:131
void subsample(unsigned int v_scale, unsigned int h_scale, vpImage< Type > &sampled) const
Definition vpImage.h:755
static bool checkFilename(const std::string &filename)
static void makeDirectory(const std::string &dirname)
static std::string getNameWE(const std::string &pathname)
static std::string getParent(const std::string &pathname)
Wrapper for the KLT (Kanade-Lucas-Tomasi) feature tracker implemented in OpenCV. Thus to enable this ...
Definition vpKltOpencv.h:83
void setBlockSize(int blockSize)
void setQuality(double qualityLevel)
void setHarrisFreeParameter(double harris_k)
void setMaxFeatures(int maxCount)
void setMinDistance(double minDistance)
void setWindowSize(int winSize)
void setPyramidLevels(int pyrMaxLevel)
static double rad(double deg)
Definition vpMath.h:129
static double deg(double rad)
Definition vpMath.h:119
Real-time 6D object pose tracking using its CAD model.
Definition vpMe.h:143
void setMu1(const double &mu_1)
Definition vpMe.h:408
void setMinThreshold(const double &minThreshold)
Definition vpMe.h:517
void setInitRange(const int &initRange)
Definition vpMe.h:368
void setRange(const unsigned int &range)
Definition vpMe.h:438
void setLikelihoodThresholdType(const vpLikelihoodThresholdType likelihood_threshold_type)
Definition vpMe.h:531
void setMaskNumber(const unsigned int &mask_number)
Definition vpMe.cpp:555
void setThreshold(const double &threshold)
Definition vpMe.h:489
void setSampleStep(const double &sample_step)
Definition vpMe.h:445
void setMaskSize(const unsigned int &mask_size)
Definition vpMe.cpp:563
void setThresholdMarginRatio(const double &thresholdMarginRatio)
Definition vpMe.h:500
void setMu2(const double &mu_2)
Definition vpMe.h:415
@ NORMALIZED_THRESHOLD
Definition vpMe.h:154
Implementation of a rotation vector as axis-angle minimal representation.
Class that consider the case of a translation vector.
Class that enables to manipulate easily a video file or a sequence of images. As it inherits from the...
void open(vpImage< vpRGBa > &I) VP_OVERRIDE
void setFileName(const std::string &filename)
void setFirstFrameIndex(const long first_frame)
long getFrameIndex() const
void acquire(vpImage< vpRGBa > &I) VP_OVERRIDE
VISP_EXPORT void npz_save(const std::string &zipname, std::string fname, const std::vector< std::string > &data_vec, const std::vector< size_t > &shape, const std::string &mode="w")
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()