Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
servoPioneerPanSegment3D.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * IBVS on Pioneer P3DX mobile platform
32 */
33#include <iostream>
34
35#include <visp3/core/vpConfig.h>
36
37#include <visp3/blob/vpDot2.h>
38#include <visp3/core/vpCameraParameters.h>
39#include <visp3/core/vpHomogeneousMatrix.h>
40#include <visp3/core/vpImage.h>
41#include <visp3/core/vpVelocityTwistMatrix.h>
42#include <visp3/gui/vpPlot.h>
43#include <visp3/robot/vpPioneerPan.h>
44#include <visp3/robot/vpRobotBiclops.h>
45#include <visp3/robot/vpRobotPioneer.h> // Include first to avoid build issues with Status, None, isfinite
46#include <visp3/gui/vpDisplayFactory.h>
47#include <visp3/sensor/vp1394CMUGrabber.h>
48#include <visp3/sensor/vp1394TwoGrabber.h>
49#include <visp3/sensor/vpV4l2Grabber.h>
50#include <visp3/visual_features/vpFeatureBuilder.h>
51#include <visp3/visual_features/vpFeatureSegment.h>
52#include <visp3/vs/vpServo.h>
53
54#define USE_REAL_ROBOT
55#define USE_PLOTTER
56#undef VISP_HAVE_V4L2 // To use a firewire camera
57
77#if defined(VISP_HAVE_PIONEER) && defined(VISP_HAVE_BICLOPS)
78int main(int argc, char **argv)
79{
80#ifdef ENABLE_VISP_NAMESPACE
81 using namespace VISP_NAMESPACE_NAME;
82#endif
83
84#if defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_CMU1394)
85#if defined(VISP_HAVE_DISPLAY)
86#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
87 std::shared_ptr<vpDisplay> display;
88#else
89 vpDisplay *display = nullptr;
90#endif
91 try {
92 vpImage<unsigned char> I; // Create a gray level image container
93 double lambda = 0.1;
94 // Scale parameter used to estimate the depth Z of the blob from its
95 // surface
96 // double coef = 0.9/14.85; // At 0.9m, the blob has a surface of 14.85
97 // (Logitech sphere)
98 double coef = 1.2 / 13.0; // At 1m, the blob has a surface of 11.3 (AVT Pike 032C)
99 double L = 0.21; // 3D horizontal segment length
100 double Z_d = 0.8; // Desired distance along Z between camera and segment
101 bool normalized = true; // segment normalized features are used
102
103 // Warning: To have a non singular task of rank 3, Y_d should be different
104 // from 0 so that the optical axis doesn't intersect the horizontal
105 // segment
106 double Y_d = -.11; // Desired distance along Y between camera and segment.
107 vpColVector qm(2); // Measured head position
108 qm = 0;
109 double qm_pan = 0; // Measured pan position (tilt is not handled in that example)
110
111#ifdef USE_REAL_ROBOT
112 // Initialize the Biclops head
113
114 vpRobotBiclops biclops("/usr/share/BiclopsDefault.cfg");
115 biclops.setDenavitHartenbergModel(vpBiclops::DH1);
116
117 // Move to the initial position
118 vpColVector q(2);
119
120 q = 0;
121 // q[0] = vpMath::rad(63);
122 // q[1] = vpMath::rad(12); // introduce a tilt angle to compensate camera
123 // sphere tilt so that the camera is parallel to the plane
124
125 biclops.setRobotState(vpRobot::STATE_POSITION_CONTROL);
126 biclops.setPosition(vpRobot::ARTICULAR_FRAME, q);
127 // biclops.setPositioningVelocity(50);
128 biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
129 qm_pan = qm[0];
130
131 // Now the head will be controlled in velocity
132 biclops.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
133
134 // Initialize the pioneer robot
135 vpRobotPioneer pioneer;
136 ArArgumentParser parser(&argc, argv);
137 parser.loadDefaultArguments();
138
139 // ArRobotConnector connects to the robot, get some initial data from it
140 // such as type and name, and then loads parameter files for this robot.
141 ArRobotConnector robotConnector(&parser, &pioneer);
142 if (!robotConnector.connectRobot()) {
143 ArLog::log(ArLog::Terse, "Could not connect to the pioneer robot.");
144 if (parser.checkHelpAndWarnUnparsed()) {
145 Aria::logOptions();
146 Aria::exit(1);
147 }
148 }
149 if (!Aria::parseArgs()) {
150 Aria::logOptions();
151 Aria::shutdown();
152 return false;
153 }
154
155 pioneer.useSonar(false); // disable the sonar device usage
156
157 // Wait 3 sec to be sure that the low level Aria thread used to control
158 // the robot is started. Without this delay we experienced a delay
159 // (around 2.2 sec) between the velocity send to the robot and the
160 // velocity that is really applied to the wheels.
161 sleep(3);
162
163 std::cout << "Pioneer robot connected" << std::endl;
164#endif
165
166 vpPioneerPan robot_pan; // Generic robot that computes the velocities for
167 // the pioneer and the Biclops head
168
169 // Camera parameters. In this experiment we don't need a precise
170 // calibration of the camera
172
173// Create the camera framegrabber
174#if defined(VISP_HAVE_V4L2)
175 // Create a grabber based on v4l2 third party lib (for usb cameras under
176 // Linux)
178 g.setScale(1);
179 g.setInput(0);
180 g.setDevice("/dev/video1");
181 g.open(I);
182 // Logitech sphere parameters
183 cam.initPersProjWithoutDistortion(558, 555, 312, 210);
184#elif defined(VISP_HAVE_DC1394)
185 // Create a grabber based on libdc1394-2.x third party lib (for firewire
186 // cameras under Linux)
187 vp1394TwoGrabber g(false);
190 // AVT Pike 032C parameters
191 cam.initPersProjWithoutDistortion(800, 795, 320, 216);
192#elif defined(VISP_HAVE_CMU1394)
193 // Create a grabber based on CMU 1394 third party lib (for firewire
194 // cameras under windows)
196 g.setVideoMode(0, 5); // 640x480 MONO8
197 g.setFramerate(4); // 30 Hz
198 g.open(I);
199 // AVT Pike 032C parameters
200 cam.initPersProjWithoutDistortion(800, 795, 320, 216);
201#endif
202
203 // Acquire an image from the grabber
204 g.acquire(I);
205
206// Create an image viewer
207#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
208 display = vpDisplayFactory::createDisplay(I, 10, 10, "Current frame");
209#else
210 display = vpDisplayFactory::allocateDisplay(I, 10, 10, "Current frame");
211#endif
214
215 // The 3D segment consists in two horizontal dots
216 vpDot2 dot[2];
217 for (int i = 0; i < 2; i++) {
218 dot[i].setGraphics(true);
219 dot[i].setComputeMoments(true);
220 dot[i].setEllipsoidShapePrecision(0.); // to track a blob without any constraint on the shape
221 dot[i].setGrayLevelPrecision(0.9); // to set the blob gray level bounds for binarisation
222 dot[i].setEllipsoidBadPointsPercentage(0.5); // to accept 50% of bad
223 // inner and outside points
224 // with bad gray level
225 dot[i].initTracking(I);
227 }
228
231 task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);
232 task.setLambda(lambda);
233 vpVelocityTwistMatrix cVe; // keep to identity
234 cVe = robot_pan.get_cVe();
235 task.set_cVe(cVe);
236
237 std::cout << "cVe: \n" << cVe << std::endl;
238
239 vpMatrix eJe;
240
241 // Update the robot jacobian that depends on the pan position
242 robot_pan.set_eJe(qm_pan);
243 // Get the robot jacobian
244 eJe = robot_pan.get_eJe();
245 task.set_eJe(eJe);
246 std::cout << "eJe: \n" << eJe << std::endl;
247
248 // Define a 3D horizontal segment and its coordinates in the image plane
249 vpPoint P[2];
250 P[0].setWorldCoordinates(-L / 2, 0, 0);
251 P[1].setWorldCoordinates(L / 2, 0, 0);
252 // Define the desired camera position
253 vpHomogeneousMatrix cMo(0, Y_d, Z_d, 0, 0,
254 0); // Here we are in front of the segment
255 for (int i = 0; i < 2; i++) {
256 P[i].changeFrame(cMo);
257 P[i].project(); // Here the x,y parameters obtained by perspective
258 // projection are computed
259 }
260
261 // Estimate the depth of the segment extremity points
262 double surface[2];
263 double Z[2]; // Depth of the segment points
264 for (int i = 0; i < 2; i++) {
265 // Surface of the blob estimated from the image moment m00 and converted
266 // in meters
267 surface[i] = 1. / sqrt(dot[i].m00 / (cam.get_px() * cam.get_py()));
268
269 // Initial depth of the blob
270 Z[i] = coef * surface[i];
271 }
272
273 // Use here a feature segment builder
274 vpFeatureSegment s_segment(normalized),
275 s_segment_d(normalized); // From the segment feature we use only alpha
276 vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
277 s_segment.setZ1(Z[0]);
278 s_segment.setZ2(Z[1]);
279 // Set the desired feature
280 vpFeatureBuilder::create(s_segment_d, P[0], P[1]);
281 s_segment.setZ1(P[0].get_Z()); // Desired depth
282 s_segment.setZ2(P[1].get_Z());
283
284 task.addFeature(s_segment, s_segment_d,
286
287#ifdef USE_PLOTTER
288 // Create a window (500 by 500) at position (700, 10) with two graphics
289 vpPlot graph(2, 500, 500, 700, 10, "Curves...");
290
291 // The first graphic contains 3 curve and the second graphic contains 3
292 // curves
293 graph.initGraph(0, 3);
294 graph.initGraph(1, 3);
295 graph.setTitle(0, "Velocities");
296 graph.setTitle(1, "Error s-s*");
297 graph.setLegend(0, 0, "vx");
298 graph.setLegend(0, 1, "wz");
299 graph.setLegend(0, 2, "w_pan");
300 graph.setLegend(1, 0, "xm/l");
301 graph.setLegend(1, 1, "1/l");
302 graph.setLegend(1, 2, "alpha");
303#endif
304
305 vpColVector v; // vz, wx
306
307 try {
308 unsigned int iter = 0;
309 while (1) {
310#ifdef USE_REAL_ROBOT
311 // Get the new pan position
312 biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
313#endif
314 qm_pan = qm[0];
315
316 // Acquire a new image
317 g.acquire(I);
318 // Set the image as background of the viewer
320
321 // Display the desired position of the segment
322 for (int i = 0; i < 2; i++)
323 P[i].display(I, cam, vpColor::red, 3);
324
325 // Does the blob tracking
326 for (int i = 0; i < 2; i++)
327 dot[i].track(I);
328
329 for (int i = 0; i < 2; i++) {
330 // Surface of the blob estimated from the image moment m00 and
331 // converted in meters
332 surface[i] = 1. / sqrt(dot[i].m00 / (cam.get_px() * cam.get_py()));
333
334 // Initial depth of the blob
335 Z[i] = coef * surface[i];
336 }
337
338 // Update the features
339 vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
340 // Update the depth of the point. Useful only if current interaction
341 // matrix is used when task.setInteractionMatrixType(vpServo::CURRENT,
342 // vpServo::PSEUDO_INVERSE) is set
343 s_segment.setZ1(Z[0]);
344 s_segment.setZ2(Z[1]);
345
346 robot_pan.get_cVe(cVe);
347 task.set_cVe(cVe);
348
349 // Update the robot jacobian that depends on the pan position
350 robot_pan.set_eJe(qm_pan);
351 // Get the robot jacobian
352 eJe = robot_pan.get_eJe();
353 // Update the jacobian that will be used to compute the control law
354 task.set_eJe(eJe);
355
356 // Compute the control law. Velocities are computed in the mobile
357 // robot reference frame
358 v = task.computeControlLaw();
359
360 // std::cout << "-----" << std::endl;
361 // std::cout << "v: " << v.t() << std::endl;
362 // std::cout << "error: " << task.getError().t() << std::endl;
363 // std::cout << "L:\n " << task.getInteractionMatrix() <<
364 // std::endl; std::cout << "eJe:\n " << task.get_eJe() <<
365 // std::endl; std::cout << "cVe:\n " << task.get_cVe() <<
366 // std::endl; std::cout << "L_cVe_eJe:\n" <<
367 // task.getInteractionMatrix() * task.get_cVe() * task.get_eJe()
368 // << std::endl; task.print() ;
369 if (task.getTaskRank() != 3)
370 std::cout << "Warning: task is of rank " << task.getTaskRank() << std::endl;
371
372#ifdef USE_PLOTTER
373 graph.plot(0, iter, v); // plot velocities applied to the robot
374 graph.plot(1, iter, task.getError()); // plot error vector
375#endif
376
377#ifdef USE_REAL_ROBOT
378 // Send the velocity to the robot
379 vpColVector v_pioneer(2); // vx, wz
380 v_pioneer[0] = v[0];
381 v_pioneer[1] = v[1];
382 vpColVector v_biclops(2); // qdot pan and tilt
383 v_biclops[0] = v[2];
384 v_biclops[1] = 0;
385
386 std::cout << "Send velocity to the pionner: " << v_pioneer[0] << " m/s " << vpMath::deg(v_pioneer[1])
387 << " deg/s" << std::endl;
388 std::cout << "Send velocity to the Biclops head: " << vpMath::deg(v_biclops[0]) << " deg/s" << std::endl;
389
390 pioneer.setVelocity(vpRobot::REFERENCE_FRAME, v_pioneer);
391 biclops.setVelocity(vpRobot::ARTICULAR_FRAME, v_biclops);
392#endif
393
394 // Draw a vertical line which corresponds to the desired x coordinate
395 // of the dot cog
396 vpDisplay::displayLine(I, 0, cam.get_u0(), 479, cam.get_u0(), vpColor::red);
398
399 // A click in the viewer to exit
400 if (vpDisplay::getClick(I, false))
401 break;
402
403 iter++;
404 // break;
405 }
406 }
407 catch (...) {
408 }
409
410#ifdef USE_REAL_ROBOT
411 std::cout << "Ending robot thread..." << std::endl;
412 pioneer.stopRunning();
413
414 // wait for the thread to stop
415 pioneer.waitForRunExit();
416#endif
417
418 // Kill the servo task
419 task.print();
420#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
421 if (display != nullptr) {
422 delete display;
423 }
424#endif
425 return EXIT_SUCCESS;
426 }
427 catch (const vpException &e) {
428 std::cout << "Catch an exception: " << e << std::endl;
429#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
430 if (display != nullptr) {
431 delete display;
432 }
433#endif
434 return EXIT_FAILURE;
435 }
436#endif
437#endif
438}
439#else
440int main()
441{
442 std::cout << "ViSP is not able to control the Pioneer robot" << std::endl;
443 return EXIT_SUCCESS;
444}
445#endif
Firewire cameras video capture based on CMU 1394 Digital Camera SDK.
void setVideoMode(unsigned long format, unsigned long mode)
void acquire(vpImage< unsigned char > &I)
void setFramerate(unsigned long fps)
void open(vpImage< unsigned char > &I)
Class for firewire ieee1394 video devices using libdc1394-2.x api.
@ DH1
First Denavit-Hartenberg representation.
Definition vpBiclops.h:96
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
static const vpColor red
Definition vpColor.h:198
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void flush(const vpImage< unsigned char > &I)
This tracker is meant to track a blob (connex pixels with same gray level) on a vpImage.
Definition vpDot2.h:127
void setGraphics(bool activate)
Definition vpDot2.h:320
void setGrayLevelPrecision(const double &grayLevelPrecision)
Definition vpDot2.cpp:715
void setEllipsoidBadPointsPercentage(const double &percentage=0.0)
Definition vpDot2.h:292
void setEllipsoidShapePrecision(const double &ellipsoidShapePrecision)
Definition vpDot2.cpp:790
void setComputeMoments(bool activate)
Definition vpDot2.h:278
void initTracking(const vpImage< unsigned char > &I, unsigned int size=0)
Definition vpDot2.cpp:263
error that can be emitted by ViSP classes.
Definition vpException.h:60
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Class that defines a 2D segment visual features. This class allow to consider two sets of visual feat...
static unsigned int selectAlpha()
static unsigned int selectXc()
static unsigned int selectL()
Implementation of an homogeneous matrix and operations on such kind of matrices.
Definition of the vpImage class member functions.
Definition vpImage.h:131
static double deg(double rad)
Definition vpMath.h:119
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:175
Generic functions for Pioneer mobile robots equipped with a pan head.
void set_eJe(double q_pan)
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Definition vpPlot.h:117
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition vpPoint.h:79
void changeFrame(const vpHomogeneousMatrix &cMo, vpColVector &cP) const VP_OVERRIDE
Definition vpPoint.cpp:273
void setWorldCoordinates(double oX, double oY, double oZ)
Definition vpPoint.cpp:116
Interface for the Biclops, pan, tilt head control.
Interface for Pioneer mobile robots based on Aria 3rd party library.
void useSonar(bool usage)
void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel) VP_OVERRIDE
@ REFERENCE_FRAME
Definition vpRobot.h:75
@ ARTICULAR_FRAME
Definition vpRobot.h:77
@ STATE_POSITION_CONTROL
Initialize the position controller.
Definition vpRobot.h:65
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
Definition vpRobot.h:64
@ EYEINHAND_L_cVe_eJe
Definition vpServo.h:183
@ PSEUDO_INVERSE
Definition vpServo.h:250
@ DESIRED
Definition vpServo.h:223
vpMatrix get_eJe() const
Definition vpUnicycle.h:96
vpVelocityTwistMatrix get_cVe() const
Definition vpUnicycle.h:72
Class that is a wrapper over the Video4Linux2 (V4L2) driver.
void setFramerate(vpV4l2FramerateType framerate)
void setInput(unsigned input=vpV4l2Grabber::DEFAULT_INPUT)
void open(vpImage< unsigned char > &I)
void setScale(unsigned scale=vpV4l2Grabber::DEFAULT_SCALE)
void setDevice(const std::string &devname)
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.