Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
servoFrankaIBVS.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * tests the control law
32 * eye-in-hand control
33 * velocity computed in the camera frame
34 */
35
60
#include <iostream>
#include <sstream>

#include <visp3/core/vpConfig.h>
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayFactory.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/robot/vpRobotFranka.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>
77
78#if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_FRANKA) && defined(VISP_HAVE_PUGIXML)
79
80#ifdef ENABLE_VISP_NAMESPACE
81using namespace VISP_NAMESPACE_NAME;
82#endif
83
84void display_point_trajectory(const vpImage<unsigned char> &I, const std::vector<vpImagePoint> &vip,
85 std::vector<vpImagePoint> *traj_vip)
86{
87 for (size_t i = 0; i < vip.size(); ++i) {
88 if (traj_vip[i].size()) {
89 // Add the point only if distance with the previous > 1 pixel
90 if (vpImagePoint::distance(vip[i], traj_vip[i].back()) > 1.) {
91 traj_vip[i].push_back(vip[i]);
92 }
93 }
94 else {
95 traj_vip[i].push_back(vip[i]);
96 }
97 }
98 for (size_t i = 0; i < vip.size(); ++i) {
99 for (size_t j = 1; j < traj_vip[i].size(); j++) {
100 vpDisplay::displayLine(I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2);
101 }
102 }
103}
104
105int main(int argc, char **argv)
106{
107 double opt_tag_size = 0.120;
108 bool opt_tag_z_aligned = false;
109 std::string opt_robot_ip = "192.168.1.1";
110 std::string opt_eMc_filename = "";
111 std::string opt_intrinsic_filename = "";
112 std::string opt_camera_name = "Camera";
113 bool display_tag = true;
114 int opt_quad_decimate = 2;
115 bool opt_verbose = false;
116 bool opt_plot = false;
117 bool opt_adaptive_gain = false;
118 bool opt_task_sequencing = false;
119 double convergence_threshold = 0.00005;
120
121 for (int i = 1; i < argc; ++i) {
122 if ((std::string(argv[i]) == "--tag-size") && (i + 1 < argc)) {
123 opt_tag_size = std::stod(argv[++i]);
124 }
125 else if ((std::string(argv[i]) == "--tag-quad-decimate") && (i + 1 < argc)) {
126 opt_quad_decimate = std::stoi(argv[++i]);
127 }
128 else if (std::string(argv[i]) == "--tag-z-aligned") {
129 opt_tag_z_aligned = true;
130 }
131 else if ((std::string(argv[i]) == "--ip") && (i + 1 < argc)) {
132 opt_robot_ip = std::string(argv[++i]);
133 }
134 else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
135 opt_intrinsic_filename = std::string(argv[++i]);
136 }
137 else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) {
138 opt_camera_name = std::string(argv[++i]);
139 }
140 else if ((std::string(argv[i]) == "--eMc") && (i + 1 < argc)) {
141 opt_eMc_filename = std::string(argv[++i]);
142 }
143 else if (std::string(argv[i]) == "--verbose") {
144 opt_verbose = true;
145 }
146 else if (std::string(argv[i]) == "--plot") {
147 opt_plot = true;
148 }
149 else if (std::string(argv[i]) == "--adaptive-gain") {
150 opt_adaptive_gain = true;
151 }
152 else if (std::string(argv[i]) == "--task-sequencing") {
153 opt_task_sequencing = true;
154 }
155 else if (std::string(argv[i]) == "--no-convergence-threshold") {
156 convergence_threshold = 0.;
157 }
158 else if ((std::string(argv[i]) == "--help") || (std::string(argv[i]) == "-h")) {
159 std::cout << "SYNOPSYS" << std::endl
160 << " " << argv[0]
161 << " [--ip <controller ip>]"
162 << " [--intrinsic <xml file>]"
163 << " [--camera-name <name>]"
164 << " [--tag-size <size>]"
165 << " [--tag-quad-decimate <decimation factor>]"
166 << " [--tag-z-aligned]"
167 << " [--eMc <extrinsic transformation file>]"
168 << " [--adaptive-gain]"
169 << " [--plot]"
170 << " [--task-sequencing]"
171 << " [--no-convergence-threshold]"
172 << " [--verbose]"
173 << " [--help] [-h]\n"
174 << std::endl;
175 std::cout << "DESCRIPTION" << std::endl
176 << " Use an image-based visual-servoing scheme to position the camera in front of an Apriltag." << std::endl
177 << std::endl
178 << " --ip <controller ip>" << std::endl
179 << " Franka controller ip address" << std::endl
180 << " Default: " << opt_robot_ip << std::endl
181 << std::endl
182 << " --intrinsic <xml file>" << std::endl
183 << " XML file that contains camera intrinsic parameters. " << std::endl
184 << " If no file is specified, use Realsense camera factory intrinsic parameters." << std::endl
185 << std::endl
186 << " --camera-name <name>" << std::endl
187 << " Camera name in the XML file that contains camera intrinsic parameters." << std::endl
188 << " Default: \"Camera\"" << std::endl
189 << std::endl
190 << " --tag-size <size>" << std::endl
191 << " Apriltag size in [m]." << std::endl
192 << " Default: " << opt_tag_size << " [m]" << std::endl
193 << std::endl
194 << " --tag-z-aligned" << std::endl
195 << " When enabled, tag z-axis and camera z-axis are aligned." << std::endl
196 << " Default: false" << std::endl
197 << std::endl
198 << " --eMc <extrinsic transformation file>" << std::endl
199 << " File containing the homogeneous transformation matrix between" << std::endl
200 << " robot end-effector and camera frame." << std::endl
201 << std::endl
202 << " --tag-quad-decimate <decimation factor>" << std::endl
203 << " Decimation factor used during Apriltag detection." << std::endl
204 << " Default: " << opt_quad_decimate << std::endl
205 << std::endl
206 << " --adaptive-gain" << std::endl
207 << " Flag to enable adaptive gain to speed up visual servo near convergence." << std::endl
208 << std::endl
209 << " --plot" << std::endl
210 << " Flag to enable curve plotter." << std::endl
211 << std::endl
212 << " --task-sequencing" << std::endl
213 << " Flag to enable task sequencing scheme." << std::endl
214 << std::endl
215 << " --no-convergence-threshold" << std::endl
216 << " Flag to disable convergence threshold used to stop the visual servo." << std::endl
217 << std::endl
218 << " --verbose" << std::endl
219 << " Flag to enable extra verbosity." << std::endl
220 << std::endl
221 << " --help, -h" << std::endl
222 << " Print this helper message." << std::endl
223 << std::endl;
224
225 return EXIT_SUCCESS;
226 }
227 else {
228 std::cout << "\nERROR" << std::endl
229 << std::string(argv[i]) << " command line option is not supported." << std::endl
230 << "Use " << std::string(argv[0]) << " --help" << std::endl
231 << std::endl;
232 return EXIT_FAILURE;
233 }
234 }
235
236 vpRealSense2 rs;
237 rs2::config config;
238 unsigned int width = 640, height = 480;
239 config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
240 config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
241 config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
242 rs.open(config);
243
244 vpImage<unsigned char> I(height, width);
245
246#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
247 std::shared_ptr<vpDisplay> display = vpDisplayFactory::createDisplay(I, 10, 10, "Current image");
248#else
249 vpDisplay *display = vpDisplayFactory::allocateDisplay(I, 10, 10, "Current image");
250#endif
251
252 std::cout << "Parameters:" << std::endl;
253 std::cout << " Apriltag " << std::endl;
254 std::cout << " Size [m] : " << opt_tag_size << std::endl;
255 std::cout << " Z aligned : " << (opt_tag_z_aligned ? "true" : "false") << std::endl;
256 std::cout << " Camera intrinsics " << std::endl;
257 std::cout << " Factory parameters : " << (opt_intrinsic_filename.empty() ? "yes" : "no") << std::endl;
258
259 // Get camera intrinsics
261 if (opt_intrinsic_filename.empty()) {
262 std::cout << "Use Realsense camera intrinsic factory parameters: " << std::endl;
264 std::cout << "cam:\n" << cam << std::endl;
265 }
266 else if (!vpIoTools::checkFilename(opt_intrinsic_filename)) {
267 std::cout << "Camera parameters file " << opt_intrinsic_filename << " doesn't exist." << std::endl;
268 return EXIT_FAILURE;
269 }
270 else {
272 if (!opt_camera_name.empty()) {
273
274 std::cout << " Param file name [.xml]: " << opt_intrinsic_filename << std::endl;
275 std::cout << " Camera name : " << opt_camera_name << std::endl;
276
277 if (parser.parse(cam, opt_intrinsic_filename, opt_camera_name, vpCameraParameters::perspectiveProjWithDistortion) !=
279 std::cout << "Unable to parse parameters with distortion for camera \"" << opt_camera_name << "\" from "
280 << opt_intrinsic_filename << " file" << std::endl;
281 std::cout << "Attempt to find parameters without distortion" << std::endl;
282
283 if (parser.parse(cam, opt_intrinsic_filename, opt_camera_name,
285 std::cout << "Unable to parse parameters without distortion for camera \"" << opt_camera_name << "\" from "
286 << opt_intrinsic_filename << " file" << std::endl;
287 return EXIT_FAILURE;
288 }
289 }
290 }
291 }
292
293 std::cout << "Camera parameters used to compute the pose:\n" << cam << std::endl;
294
295 // Setup Apriltag detector
298 // vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::BEST_RESIDUAL_VIRTUAL_VS;
299 vpDetectorAprilTag detector(tagFamily);
300 detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
301 detector.setDisplayTag(display_tag);
302 detector.setAprilTagQuadDecimate(opt_quad_decimate);
303 detector.setZAlignedWithCameraAxis(opt_tag_z_aligned);
304
305 // Setup camera extrinsics
306 vpPoseVector e_P_c;
307 // Set camera extrinsics default values
308 e_P_c[0] = 0.0337731;
309 e_P_c[1] = -0.00535012;
310 e_P_c[2] = -0.0523339;
311 e_P_c[3] = -0.247294;
312 e_P_c[4] = -0.306729;
313 e_P_c[5] = 1.53055;
314
315 // If provided, read camera extrinsics from --eMc <file>
316 if (!opt_eMc_filename.empty()) {
317 e_P_c.loadYAML(opt_eMc_filename, e_P_c);
318 }
319 else {
320 std::cout << "Warning, opt_eMc_filename is empty! Use hard coded values." << std::endl;
321 }
322 vpHomogeneousMatrix e_M_c(e_P_c);
323 std::cout << "e_M_c:\n" << e_M_c << std::endl;
324
325 // Set desired pose used to compute the desired features
326 vpHomogeneousMatrix cd_M_o(vpTranslationVector(0, 0, opt_tag_size * 3), // 3 times tag with along camera z axis
327 vpRotationMatrix({ 1, 0, 0, 0, -1, 0, 0, 0, -1 }));
328
329 vpRobotFranka robot;
330
331 try {
332 robot.connect(opt_robot_ip);
333
334 // Create visual features
335 std::vector<vpFeaturePoint> p(4), pd(4); // We use 4 points
336
337 // Define 4 3D points corresponding to the CAD model of the Apriltag
338 std::vector<vpPoint> point(4);
339 point[0].setWorldCoordinates(-opt_tag_size / 2., -opt_tag_size / 2., 0);
340 point[1].setWorldCoordinates(+opt_tag_size / 2., -opt_tag_size / 2., 0);
341 point[2].setWorldCoordinates(+opt_tag_size / 2., +opt_tag_size / 2., 0);
342 point[3].setWorldCoordinates(-opt_tag_size / 2., +opt_tag_size / 2., 0);
343
344 // Setup IBVS
346 // Add the 4 visual feature points
347 for (size_t i = 0; i < p.size(); ++i) {
348 task.addFeature(p[i], pd[i]);
349 }
351 task.setInteractionMatrixType(vpServo::CURRENT);
352
353 if (opt_adaptive_gain) {
354 vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=4, lambda(oo)=0.4 and lambda'(0)=30
355 task.setLambda(lambda);
356 }
357 else {
358 task.setLambda(0.5);
359 }
360
361 vpPlot *plotter = nullptr;
362 int iter_plot = 0;
363
364 if (opt_plot) {
365 plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
366 "Real time curves plotter");
367 plotter->setTitle(0, "Visual features error");
368 plotter->setTitle(1, "Camera velocities");
369 plotter->initGraph(0, 8);
370 plotter->initGraph(1, 6);
371 plotter->setLegend(0, 0, "error_feat_p1_x");
372 plotter->setLegend(0, 1, "error_feat_p1_y");
373 plotter->setLegend(0, 2, "error_feat_p2_x");
374 plotter->setLegend(0, 3, "error_feat_p2_y");
375 plotter->setLegend(0, 4, "error_feat_p3_x");
376 plotter->setLegend(0, 5, "error_feat_p3_y");
377 plotter->setLegend(0, 6, "error_feat_p4_x");
378 plotter->setLegend(0, 7, "error_feat_p4_y");
379 plotter->setLegend(1, 0, "vc_x");
380 plotter->setLegend(1, 1, "vc_y");
381 plotter->setLegend(1, 2, "vc_z");
382 plotter->setLegend(1, 3, "wc_x");
383 plotter->setLegend(1, 4, "wc_y");
384 plotter->setLegend(1, 5, "wc_z");
385 }
386
387 bool final_quit = false;
388 bool has_converged = false;
389 bool send_velocities = false;
390 bool servo_started = false;
391 std::vector<vpImagePoint> *traj_corners = nullptr; // To memorize point trajectory
392
393 static double t_init_servo = vpTime::measureTimeMs();
394
395 robot.set_eMc(e_M_c); // Set location of the camera wrt end-effector frame
396 robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
397
398 vpHomogeneousMatrix cd_M_c, c_M_o, o_M_o;
399
400 while (!has_converged && !final_quit) {
401 double t_start = vpTime::measureTimeMs();
402
403 rs.acquire(I);
404
406
407 std::vector<vpHomogeneousMatrix> c_M_o_vec;
408 bool ret = detector.detect(I, opt_tag_size, cam, c_M_o_vec);
409
410 {
411 std::stringstream ss;
412 ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
413 vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
414 }
415
416 vpColVector v_c(6);
417
418 // Only one tag is detected
419 if (ret && (c_M_o_vec.size() == 1)) {
420 c_M_o = c_M_o_vec[0];
421
422 static bool first_time = true;
423 if (first_time) {
424 // Introduce security wrt tag positioning in order to avoid PI rotation
425 std::vector<vpHomogeneousMatrix> secure_o_M_o(2), secure_cd_M_c(2);
426 secure_o_M_o[1].buildFrom(0, 0, 0, 0, 0, M_PI);
427 for (size_t i = 0; i < 2; ++i) {
428 secure_cd_M_c[i] = cd_M_o * secure_o_M_o[i] * c_M_o.inverse();
429 }
430 if (std::fabs(secure_cd_M_c[0].getThetaUVector().getTheta()) < std::fabs(secure_cd_M_c[1].getThetaUVector().getTheta())) {
431 o_M_o = secure_o_M_o[0];
432 }
433 else {
434 std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
435 o_M_o = secure_o_M_o[1]; // Introduce PI rotation
436 }
437
438 // Compute the desired position of the features from the desired pose
439 for (size_t i = 0; i < point.size(); ++i) {
440 vpColVector c_P, p;
441 point[i].changeFrame(cd_M_o * o_M_o, c_P);
442 point[i].projection(c_P, p);
443
444 pd[i].set_x(p[0]);
445 pd[i].set_y(p[1]);
446 pd[i].set_Z(c_P[2]);
447 }
448 }
449
450 // Get tag corners
451 std::vector<vpImagePoint> corners = detector.getPolygon(0);
452
453 // Update visual features
454 for (size_t i = 0; i < corners.size(); ++i) {
455 // Update the point feature from the tag corners location
456 vpFeatureBuilder::create(p[i], cam, corners[i]);
457 // Set the feature Z coordinate from the pose
458 vpColVector c_P;
459 point[i].changeFrame(c_M_o, c_P);
460
461 p[i].set_Z(c_P[2]);
462 }
463
464 if (opt_task_sequencing) {
465 if (!servo_started) {
466 if (send_velocities) {
467 servo_started = true;
468 }
469 t_init_servo = vpTime::measureTimeMs();
470 }
471 v_c = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
472 }
473 else {
474 v_c = task.computeControlLaw();
475 }
476
477 // Display the current and desired feature points in the image display
478 vpServoDisplay::display(task, cam, I);
479 for (size_t i = 0; i < corners.size(); ++i) {
480 std::stringstream ss;
481 ss << i;
482 // Display current point indexes
483 vpDisplay::displayText(I, corners[i] + vpImagePoint(15, 15), ss.str(), vpColor::red);
484 // Display desired point indexes
485 vpImagePoint ip;
486 vpMeterPixelConversion::convertPoint(cam, pd[i].get_x(), pd[i].get_y(), ip);
487 vpDisplay::displayText(I, ip + vpImagePoint(15, 15), ss.str(), vpColor::red);
488 }
489 if (first_time) {
490 traj_corners = new std::vector<vpImagePoint>[corners.size()];
491 }
492 // Display the trajectory of the points used as features
493 display_point_trajectory(I, corners, traj_corners);
494
495 if (opt_plot) {
496 plotter->plot(0, iter_plot, task.getError());
497 plotter->plot(1, iter_plot, v_c);
498 iter_plot++;
499 }
500
501 if (opt_verbose) {
502 std::cout << "v_c: " << v_c.t() << std::endl;
503 }
504
505 double error = task.getError().sumSquare();
506 std::stringstream ss;
507 ss << "error: " << error;
508 vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
509
510 if (opt_verbose)
511 std::cout << "error: " << error << std::endl;
512
513 if (error < convergence_threshold) {
514 has_converged = true;
515 std::cout << "Servo task has converged" << std::endl;
516 vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
517 }
518 if (first_time) {
519 first_time = false;
520 }
521 } // end if (c_M_o_vec.size() == 1)
522 else {
523 v_c = 0;
524 }
525
526 if (!send_velocities) {
527 v_c = 0;
528 }
529
530 // Send to the robot
531 robot.setVelocity(vpRobot::CAMERA_FRAME, v_c);
532
533 {
534 std::stringstream ss;
535 ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
536 vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
537 }
539
541 if (vpDisplay::getClick(I, button, false)) {
542 switch (button) {
544 send_velocities = !send_velocities;
545 break;
546
548 final_quit = true;
549 v_c = 0;
550 break;
551
552 default:
553 break;
554 }
555 }
556 }
557 std::cout << "Stop the robot " << std::endl;
558 robot.setRobotState(vpRobot::STATE_STOP);
559
560 if (opt_plot && plotter != nullptr) {
561 delete plotter;
562 plotter = nullptr;
563 }
564
565 if (!final_quit) {
566 while (!final_quit) {
567 rs.acquire(I);
569
570 vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
571 vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);
572
573 if (vpDisplay::getClick(I, false)) {
574 final_quit = true;
575 }
576
578 }
579 }
580 if (traj_corners) {
581 delete[] traj_corners;
582 }
583 }
584 catch (const vpException &e) {
585 std::cout << "ViSP exception: " << e.what() << std::endl;
586 std::cout << "Stop the robot " << std::endl;
587 robot.setRobotState(vpRobot::STATE_STOP);
588#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
589 if (display != nullptr) {
590 delete display;
591 }
592#endif
593 return EXIT_FAILURE;
594 }
595 catch (const franka::NetworkException &e) {
596 std::cout << "Franka network exception: " << e.what() << std::endl;
597 std::cout << "Check if you are connected to the Franka robot"
598 << " or if you specified the right IP using --ip command line option set by default to 192.168.1.1. "
599 << std::endl;
600#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
601 if (display != nullptr) {
602 delete display;
603 }
604#endif
605 return EXIT_FAILURE;
606 }
607 catch (const std::exception &e) {
608 std::cout << "Franka exception: " << e.what() << std::endl;
609#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
610 if (display != nullptr) {
611 delete display;
612 }
613#endif
614 return EXIT_FAILURE;
615 }
616
617#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
618 if (display != nullptr) {
619 delete display;
620 }
621#endif
622
623 return EXIT_SUCCESS;
624}
625#else
/*
 * Fallback entry point built when at least one required third-party
 * dependency is missing: report what has to be installed before rebuilding.
 */
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "Install librealsense-2.x and rebuild ViSP." << std::endl;
#endif
#if !defined(VISP_HAVE_FRANKA)
  std::cout << "Install libfranka and rebuild ViSP." << std::endl;
#endif
#if !defined(VISP_HAVE_PUGIXML)
  std::cout << "Build ViSP with pugixml support enabled." << std::endl;
#endif
  return EXIT_SUCCESS;
}
639#endif
Adaptive gain computation.
static bool loadYAML(const std::string &filename, vpArray2D< Type > &A, char *header=nullptr)
Definition vpArray2D.h:874
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
static const vpColor red
Definition vpColor.h:198
static const vpColor green
Definition vpColor.h:201
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
static double distance(const vpImagePoint &iP1, const vpImagePoint &iP2)
Definition of the vpImage class member functions.
Definition vpImage.h:131
static bool checkFilename(const std::string &filename)
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Definition vpPlot.h:117
Implementation of a pose vector and operations on poses.
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
@ CAMERA_FRAME
Definition vpRobot.h:81
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
Definition vpRobot.h:64
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
Definition vpRobot.h:63
Implementation of a rotation matrix and operations on such kind of matrices.
static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
@ EYEINHAND_CAMERA
Definition vpServo.h:176
@ CURRENT
Definition vpServo.h:217
Class that consider the case of a translation vector.
XML parser to load and save intrinsic camera parameters.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()