Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
servoFlirPtuIBVS.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * tests the control law
32 * eye-in-hand control
33 * velocity computed in the camera frame
34 */
55
56#include <iostream>
57
58#include <visp3/core/vpCameraParameters.h>
59#include <visp3/core/vpConfig.h>
60#include <visp3/core/vpXmlParserCamera.h>
61#include <visp3/detection/vpDetectorAprilTag.h>
62#include <visp3/gui/vpDisplayFactory.h>
63#include <visp3/gui/vpPlot.h>
64#include <visp3/io/vpImageIo.h>
65#include <visp3/robot/vpRobotFlirPtu.h>
66#include <visp3/sensor/vpFlyCaptureGrabber.h>
67#include <visp3/visual_features/vpFeatureBuilder.h>
68#include <visp3/visual_features/vpFeaturePoint.h>
69#include <visp3/vs/vpServo.h>
70#include <visp3/vs/vpServoDisplay.h>
71
72#if defined(VISP_HAVE_FLIR_PTU_SDK) && defined(VISP_HAVE_FLYCAPTURE) && \
73 defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_PUGIXML)
74
75#ifdef ENABLE_VISP_NAMESPACE
76using namespace VISP_NAMESPACE_NAME;
77#endif
78
79void display_point_trajectory(const vpImage<unsigned char> &I, const vpImagePoint &ip,
80 std::vector<vpImagePoint> &traj_ip)
81{
82 if (traj_ip.size()) {
83 // Add the point only if distance with the previous > 2 pixel
84 if (vpImagePoint::distance(ip, traj_ip.back()) > 2.) {
85 traj_ip.push_back(ip);
86 }
87 }
88 else {
89 traj_ip.push_back(ip);
90 }
91 for (size_t j = 1; j < traj_ip.size(); j++) {
92 vpDisplay::displayLine(I, traj_ip[j - 1], traj_ip[j], vpColor::green, 2);
93 }
94}
95
/*
 * Entry point of the FLIR PTU image-based visual servoing (IBVS) example:
 * parses command-line options, connects to the pan/tilt unit, grabs images
 * with a FLIR FlyCapture camera, detects an AprilTag, and servos the tag
 * cog to the image center by sending pan/tilt joint velocities.
 *
 * NOTE(review): this listing was extracted from a generated documentation
 * page. The number fused at the start of each line is a generator artifact,
 * and several declaration lines were elided (the numbering jumps); elided
 * spots are flagged below — confirm against the original source file.
 */
96int main(int argc, char **argv)
97{
98  std::string opt_portname;
99  int opt_baudrate = 9600;
100  bool opt_network = false;
101  std::string opt_extrinsic;
102  std::string opt_intrinsic;
103  std::string opt_camera_name;
104  bool display_tag = true;
105  int opt_quad_decimate = 2;
106  double opt_tag_size = 0.120; // Used to compute the distance of the cog wrt the camera
107  bool opt_verbose = false;
108  bool opt_plot = false;
109  bool opt_adaptive_gain = false;
110  bool opt_task_sequencing = false;
111  double opt_constant_gain = 0.5;
112  bool opt_display_trajectory = true;
113  double convergence_threshold = 0.00005;
114
  // Without arguments, just point the user at --help and exit.
115  if (argc == 1) {
116    std::cout << "To see how to use this example, run: " << argv[0] << " --help" << std::endl;
117    return EXIT_SUCCESS;
118  }
119
  // Command-line option parsing.
120  for (int i = 1; i < argc; i++) {
121    if ((std::string(argv[i]) == "--portname" || std::string(argv[i]) == "-p") && (i + 1 < argc)) {
122      opt_portname = std::string(argv[++i]);
123    }
124    else if ((std::string(argv[i]) == "--baudrate" || std::string(argv[i]) == "-b") && (i + 1 < argc)) {
125      opt_baudrate = std::atoi(argv[++i]);
126    }
127    else if ((std::string(argv[i]) == "--network" || std::string(argv[i]) == "-n")) {
128      opt_network = true;
129    }
130    else if (std::string(argv[i]) == "--extrinsic" && i + 1 < argc) {
131      opt_extrinsic = std::string(argv[++i]);
132    }
133    else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
134      opt_intrinsic = std::string(argv[++i]);
135    }
136    else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) {
137      opt_camera_name = std::string(argv[++i]);
138    }
139    else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") {
140      opt_verbose = true;
141    }
    // NOTE(review): "-p" here collides with the "--portname"/"-p" branch
    // above; it only reaches this branch when "-p" is the last argument
    // (no value follows). Confirm the intended short option for --plot.
142    else if (std::string(argv[i]) == "--plot" || std::string(argv[i]) == "-p") {
143      opt_plot = true;
144    }
    // NOTE(review): opt_display_trajectory already defaults to true, so this
    // flag is currently a no-op (there is no option to disable the display).
145    else if (std::string(argv[i]) == "--display-image-trajectory" || std::string(argv[i]) == "-traj") {
146      opt_display_trajectory = true;
147    }
148    else if (std::string(argv[i]) == "--adaptive-gain" || std::string(argv[i]) == "-a") {
149      opt_adaptive_gain = true;
150    }
    // NOTE(review): reads argv[i + 1] without checking i + 1 < argc and
    // without ++i, so the gain value is parsed again as an option on the
    // next loop iteration (unlike the --tag-size branch below).
151    else if (std::string(argv[i]) == "--constant-gain" || std::string(argv[i]) == "-g") {
152      opt_constant_gain = std::stod(argv[i + 1]);
153    }
154    else if (std::string(argv[i]) == "--task-sequencing") {
155      opt_task_sequencing = true;
156    }
157    else if (std::string(argv[i]) == "--tag-size" && i + 1 < argc) {
158      opt_tag_size = std::stod(argv[++i]);
159    }
160    else if (std::string(argv[i]) == "--tag-quad-decimate" && i + 1 < argc) {
161      opt_quad_decimate = std::stoi(argv[++i]);
162    }
163    else if (std::string(argv[i]) == "--no-convergence-threshold") {
164      convergence_threshold = 0.;
165    }
166    else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
167      std::cout << "SYNOPSIS" << std::endl
168        << " " << argv[0] << " [--portname <portname>] [--baudrate <rate>] [--network] "
169        << "[--extrinsic <extrinsic.yaml>] [--intrinsic <intrinsic.xml>] [--camera-name <name>] "
170        << "[--tag-size <size>] [--tag-quad-decimate <decimation>] "
171        << "[--adaptive-gain] [--constant-gain] [--display-image-trajectory] [--plot] [--task-sequencing] "
172        << "[--no-convergence-threshold] [--verbose] [--help] [-h]" << std::endl
173        << std::endl;
174      std::cout << "DESCRIPTION" << std::endl
175        << " --portname, -p <portname>" << std::endl
176        << " Set serial or tcp port name." << std::endl
177        << std::endl
178        << " --baudrate, -b <rate>" << std::endl
179        << " Set serial communication baud rate. Default: " << opt_baudrate << "." << std::endl
180        << std::endl
181        << " --network, -n" << std::endl
182        << " Get PTU network information (Hostname, IP, Gateway) and exit. " << std::endl
183        << std::endl
        // NOTE(review): --reset, -r is documented here but never parsed in
        // the option loop above — confirm whether it was dropped intentionally.
184        << " --reset, -r" << std::endl
185        << " Reset PTU axis and exit. " << std::endl
186        << std::endl
187        << " --extrinsic <extrinsic.yaml>" << std::endl
188        << " YAML file containing extrinsic camera parameters as a vpHomogeneousMatrix." << std::endl
189        << " It corresponds to the homogeneous transformation eMc, between end-effector" << std::endl
190        << " and camera frame." << std::endl
191        << std::endl
192        << " --intrinsic <intrinsic.xml>" << std::endl
193        << " Intrinsic camera parameters obtained after camera calibration." << std::endl
194        << std::endl
195        << " --camera-name <name>" << std::endl
196        << " Name of the camera to consider in the xml file provided for intrinsic camera parameters."
197        << std::endl
        // NOTE(review): "or the black part" in the string below is likely a
        // typo for "of the black part".
198        << " --tag-size <size>" << std::endl
199        << " Width in meter or the black part of the AprilTag used as target." << std::endl
200        << " Default " << opt_tag_size << std::endl
201        << std::endl
202        << " --tag-quad-decimate <decimation>" << std::endl
203        << " Decimation factor used to detect AprilTag." << std::endl
204        << " Default " << opt_quad_decimate << std::endl
205        << std::endl
206        << " --adaptive-gain, -a" << std::endl
207        << " Enable adaptive gain instead of constant gain to speed up convergence. " << std::endl
208        << std::endl
209        << " --constant-gain, -g" << std::endl
210        << " Constant gain value. Default value: " << opt_constant_gain << std::endl
211        << std::endl
212        << " --display-image-trajectory, -traj" << std::endl
213        << " Display the trajectory of the target cog in the image. " << std::endl
214        << std::endl
215        << " --plot, -p" << std::endl
216        << " Enable curve plotter. " << std::endl
217        << std::endl
218        << " --task-sequencing" << std::endl
219        << " Enable task sequencing that allows to smoothly control the velocity of the robot. " << std::endl
220        << std::endl
221        << " --no-convergence-threshold" << std::endl
222        << " Disable ending servoing when it reaches the desired position." << std::endl
223        << std::endl
224        << " --verbose, -v" << std::endl
225        << " Additional printings. " << std::endl
226        << std::endl
227        << " --help, -h" << std::endl
228        << " Print this helper message. " << std::endl
229        << std::endl;
230      std::cout << "EXAMPLE" << std::endl
231        << " - How to get network IP" << std::endl
232#ifdef _WIN32
233        << " $ " << argv[0] << " --portname COM1 --network" << std::endl
234        << " Try to connect FLIR PTU to port: COM1 with baudrate: 9600" << std::endl
235#else
236        << " $ " << argv[0] << " -p /dev/ttyUSB0 --network" << std::endl
237        << " Try to connect FLIR PTU to port: /dev/ttyUSB0 with baudrate: 9600" << std::endl
238#endif
239        << " PTU HostName: PTU-5" << std::endl
240        << " PTU IP : 169.254.110.254" << std::endl
241        << " PTU Gateway : 0.0.0.0" << std::endl
242        << " - How to run this binary using network communication" << std::endl
        // NOTE(review): "--gain" in the example below is not an accepted
        // option; the gain option parsed above is --constant-gain / -g.
243        << " $ " << argv[0] << " --portname tcp:169.254.110.254 --tag-size 0.1 --gain 0.1" << std::endl;
244
245      return EXIT_SUCCESS;
246    }
247  }
248
249  vpRobotFlirPtu robot;
250
  // Display is heap-allocated pre-C++11, smart-pointer managed otherwise.
251#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
252  std::shared_ptr<vpDisplay> display;
253#else
254  vpDisplay *display = nullptr;
255#endif
256
257  try {
258    std::cout << "Try to connect FLIR PTU to port: " << opt_portname << " with baudrate: " << opt_baudrate << std::endl;
259    robot.connect(opt_portname, opt_baudrate);
260
    // With --network, only print the PTU network configuration and exit.
261    if (opt_network) {
262      std::cout << "PTU HostName: " << robot.getNetworkHostName() << std::endl;
263      std::cout << "PTU IP : " << robot.getNetworkIP() << std::endl;
264      std::cout << "PTU Gateway : " << robot.getNetworkGateway() << std::endl;
265      return EXIT_SUCCESS;
266    }
267
    // NOTE(review): the declarations of the FlyCapture grabber g and the
    // grayscale image I were elided from this listing — confirm against
    // the original source.
269
271    g.open(I);
272
273    // Get camera extrinsics
    // NOTE(review): the declarations of eRc (rotation) and etc (translation)
    // were elided here; the values below are the hard-coded defaults.
276    eRc << 0, 0, 1, -1, 0, 0, 0, -1, 0;
277    etc << -0.1, -0.123, 0.035;
278    vpHomogeneousMatrix eMc(etc, eRc);
279
280    // If provided, read camera extrinsics from command line option
281    if (!opt_extrinsic.empty()) {
282      vpPoseVector ePc;
283      ePc.loadYAML(opt_extrinsic, ePc);
284      eMc.buildFrom(ePc);
285    }
286    else {
287      std::cout << "***************************************************************" << std::endl;
288      std::cout << "Warning, use hard coded values for extrinsic camera parameters." << std::endl;
289      std::cout << "Create a yaml file that contains the extrinsic:" << std::endl
290        << std::endl
291        << "$ cat eMc.yaml" << std::endl
292        << "rows: 4" << std::endl
293        << "cols: 4" << std::endl
294        << "data:" << std::endl
295        << " - [0, 0, 1, -0.1]" << std::endl
296        << " - [-1, 0, 0, -0.123]" << std::endl
297        << " - [0, -1, 0, 0.035]" << std::endl
298        << " - [0, 0, 0, 1]" << std::endl
299        << std::endl
300        << "and load this file with [--extrinsic <extrinsic.yaml] command line option, like:" << std::endl
301        << std::endl
302        << "$ " << argv[0] << "-p " << opt_portname << " --extrinsic eMc.yaml" << std::endl
303        << std::endl;
304      std::cout << "***************************************************************" << std::endl;
305    }
306
307    std::cout << "Considered extrinsic transformation eMc:\n" << eMc << std::endl;
308
309    // Get camera intrinsics
    // Fallback intrinsics: 900 px focal lengths, principal point at image center.
310    vpCameraParameters cam(900, 900, I.getWidth() / 2., I.getHeight() / 2.);
311
312    // If provided, read camera intrinsics from command line option
313    if (!opt_intrinsic.empty() && !opt_camera_name.empty()) {
      // NOTE(review): the vpXmlParserCamera parser declaration was elided here.
315      parser.parse(cam, opt_intrinsic, opt_camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
316    }
317    else {
318      std::cout << "***************************************************************" << std::endl;
319      std::cout << "Warning, use hard coded values for intrinsic camera parameters." << std::endl;
320      std::cout << "Calibrate your camera and load the parameters from command line options, like:" << std::endl
321        << std::endl
322        << "$ " << argv[0] << "-p " << opt_portname << " --intrinsic camera.xml --camera-name \"Camera\""
323        << std::endl
324        << std::endl;
325      std::cout << "***************************************************************" << std::endl;
326    }
327
328    std::cout << "Considered intrinsic camera parameters:\n" << cam << "\n";
329
330#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
331    display = vpDisplayFactory::createDisplay(I, 10, 10, "Color image");
332#else
333    display = vpDisplayFactory::allocateDisplay(I, 10, 10, "Color image");
334#endif
335
    // NOTE(review): the AprilTag family and pose-estimation-method
    // declarations (tagFamily, poseEstimationMethod) were elided here.
338    vpDetectorAprilTag detector(tagFamily);
339    detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
340    detector.setDisplayTag(display_tag);
341    detector.setAprilTagQuadDecimate(opt_quad_decimate);
342
343    // Create visual features
344    vpFeaturePoint p, pd; // We use 1 point, the tag cog
345
346    // Set desired position to the image center
347    pd.set_x(0);
348    pd.set_y(0);
349
    // NOTE(review): the vpServo task declaration and the servo-type
    // selection (eye-in-hand, cVe/eJe) were elided from this listing.
351    // Add the visual feature point
352    task.addFeature(p, pd);
354    task.setInteractionMatrixType(vpServo::CURRENT);
355
356    if (opt_adaptive_gain) {
357      vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=1.5, lambda(oo)=0.4 and lambda'(0)=30
358      task.setLambda(lambda);
359    }
360    else {
361      task.setLambda(opt_constant_gain);
362    }
363
364    vpPlot *plotter = nullptr;
365    int iter_plot = 0;
366
    // Optional real-time plot of feature errors and joint velocities.
367    if (opt_plot) {
368      plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
369                           "Real time curves plotter");
370      plotter->setTitle(0, "Visual features error");
371      plotter->setTitle(1, "Joint velocities");
372      plotter->initGraph(0, 2);
373      plotter->initGraph(1, 2);
374      plotter->setLegend(0, 0, "error_feat_p_x");
375      plotter->setLegend(0, 1, "error_feat_p_y");
376      plotter->setLegend(1, 0, "qdot_pan");
377      plotter->setLegend(1, 1, "qdot_tilt");
378    }
379
380    bool final_quit = false;
381    bool has_converged = false;
382    bool send_velocities = false;
383    bool servo_started = false;
384    std::vector<vpImagePoint> traj_cog;
385    vpMatrix eJe;
386
387    static double t_init_servo = vpTime::measureTimeMs();
388
389    robot.set_eMc(eMc); // Set location of the camera wrt end-effector frame
390
391    vpVelocityTwistMatrix cVe = robot.get_cVe();
392    std::cout << cVe << std::endl;
393    task.set_cVe(cVe);
394
395    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
396
    // Servo loop: grab, detect the tag, compute pan/tilt joint velocities,
    // send them to the PTU; ends on convergence or user right click.
397    while (!has_converged && !final_quit) {
398      double t_start = vpTime::measureTimeMs();
399
400      g.acquire(I);
401
      // NOTE(review): a vpDisplay::display(I) call was elided here.
403
404      std::vector<vpHomogeneousMatrix> cMo_vec;
405      detector.detect(I, opt_tag_size, cam, cMo_vec);
406
407      std::stringstream ss;
408      ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
409      vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
410
411      vpColVector qdot(2);
412
413      // Only one tag has to be detected
414      if (detector.getNbObjects() == 1) {
415
416        vpImagePoint cog = detector.getCog(0);
        // Tag depth: translation z-component of the estimated cMo pose.
417        double Z = cMo_vec[0][2][3];
418
419        // Update current feature from measured cog position
420        double x = 0, y = 0;
        // NOTE(review): a line converting the cog pixel coordinates into the
        // normalized coordinates (x, y) appears to be elided here — as shown,
        // x and y would stay 0. Confirm against the original source.
422        if (opt_verbose) {
423          std::cout << "Z: " << Z << std::endl;
424        }
425        p.set_xyZ(x, y, Z);
426        pd.set_Z(Z);
427
428        // Get robot Jacobian
429        robot.get_eJe(eJe);
430        task.set_eJe(eJe);
431
        // Task sequencing ramps velocities from 0; the ramp clock restarts
        // until the user actually enables velocity sending.
432        if (opt_task_sequencing) {
433          if (!servo_started) {
434            if (send_velocities) {
435              servo_started = true;
436            }
437            t_init_servo = vpTime::measureTimeMs();
438          }
439          qdot = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
440        }
441        else {
442          qdot = task.computeControlLaw();
443        }
444
445        // Display the current and desired feature points in the image display
        // NOTE(review): the vpServoDisplay::display(...) call was elided here.
447
448        // Display the trajectory of the points used as features
449        if (opt_display_trajectory) {
450          display_point_trajectory(I, cog, traj_cog);
451        }
452
453        if (opt_plot) {
454          plotter->plot(0, iter_plot, task.getError());
455          plotter->plot(1, iter_plot, qdot);
456          iter_plot++;
457        }
458
459        if (opt_verbose) {
460          std::cout << "qdot: " << qdot.t() << std::endl;
461        }
462
463        double error = task.getError().sumSquare();
464        ss.str("");
465        ss << "error: " << error;
466        vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
467
468        if (opt_verbose)
469          std::cout << "error: " << error << std::endl;
470
        // Convergence test on the squared feature error (0 threshold with
        // --no-convergence-threshold keeps servoing forever).
471        if (error < convergence_threshold) {
472          has_converged = true;
473          std::cout << "Servo task has converged"
474            << "\n";
475          vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
476        }
477      } // end if (cMo_vec.size() == 1)
478      else {
479        qdot = 0;
480      }
481
      // Safety: keep the PTU still until the user left-clicks to start.
482      if (!send_velocities) {
483        qdot = 0;
484      }
485
486      // Send to the robot
487      robot.setVelocity(vpRobot::JOINT_STATE, qdot);
488      ss.str("");
489      ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
490      vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
491
      // NOTE(review): a vpDisplay::flush(I) call and the vpMouseButton
      // declaration were elided here.
493
495      if (vpDisplay::getClick(I, button, false)) {
        // NOTE(review): the case labels (left / right mouse buttons) were
        // elided from this listing; left click toggles velocity sending,
        // right click quits with zero velocity.
496        switch (button) {
498          send_velocities = !send_velocities;
499          break;
500
502          final_quit = true;
503          qdot = 0;
504          break;
505
506        default:
507          break;
508        }
509      }
510    }
511    std::cout << "Stop the robot " << std::endl;
512    robot.setRobotState(vpRobot::STATE_STOP);
513
514    if (opt_plot && plotter != nullptr) {
515      delete plotter;
516      plotter = nullptr;
517    }
518
    // After convergence, keep displaying images until the user clicks.
519    if (!final_quit) {
520      while (!final_quit) {
521        g.acquire(I);
        // NOTE(review): vpDisplay::display(I) was elided here.
523
524        vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
525        vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);
526
527        if (vpDisplay::getClick(I, false)) {
528          final_quit = true;
529        }
530
        // NOTE(review): vpDisplay::flush(I) was elided here.
532      }
533    }
534  }
535  catch (const vpRobotException &e) {
536    std::cout << "Catch Flir Ptu signal exception: " << e.getMessage() << std::endl;
537    robot.setRobotState(vpRobot::STATE_STOP);
538  }
539  catch (const vpException &e) {
540    std::cout << "ViSP exception: " << e.what() << std::endl;
541    std::cout << "Stop the robot " << std::endl;
542    robot.setRobotState(vpRobot::STATE_STOP);
543  }
544
545#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
546  if (display != nullptr) {
547    delete display;
548  }
549#endif
550  return EXIT_SUCCESS;
551}
552#else
553int main()
554{
555#if !defined(VISP_HAVE_FLYCAPTURE)
556 std::cout << "Install FLIR Flycapture" << std::endl;
557#endif
558#if !defined(VISP_HAVE_FLIR_PTU_SDK)
559 std::cout << "Install FLIR PTU SDK." << std::endl;
560#endif
561#if !defined(VISP_HAVE_PUGIXML)
562 std::cout << "pugixml built-in 3rdparty is requested." << std::endl;
563#endif
564 return EXIT_SUCCESS;
565}
566#endif
Adaptive gain computation.
static bool loadYAML(const std::string &filename, vpArray2D< Type > &A, char *header=nullptr)
Definition vpArray2D.h:874
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
static const vpColor red
Definition vpColor.h:198
static const vpColor green
Definition vpColor.h:201
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
Class that defines a 2D point visual feature which is composed by two parameters that are the cartesian coordinates x and y of the point in the image plane.
void set_y(double y)
void set_x(double x)
void set_Z(double Z)
void open(vpImage< unsigned char > &I)
void acquire(vpImage< unsigned char > &I)
Implementation of an homogeneous matrix and operations on such kind of matrices.
Class that defines a 2D point in an image. This class is useful for image processing and stores only the 2D coordinates of the point in the image.
static double distance(const vpImagePoint &iP1, const vpImagePoint &iP2)
Definition of the vpImage class member functions.
Definition vpImage.h:131
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:175
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
This class enables real time drawing of 2D or 3D graphics. An instance of the class opens a window which contains between 1 and 4 graphics.
Definition vpPlot.h:117
Implementation of a pose vector and operations on poses.
Error that can be emitted by the vpRobot class and its derivatives.
@ JOINT_STATE
Definition vpRobot.h:79
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
Definition vpRobot.h:64
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
Definition vpRobot.h:63
Implementation of a rotation matrix and operations on such kind of matrices.
static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
@ EYEINHAND_L_cVe_eJe
Definition vpServo.h:183
@ CURRENT
Definition vpServo.h:217
Class that consider the case of a translation vector.
XML parser to load and save intrinsic camera parameters.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()