63#include <visp3/core/vpCameraParameters.h>
64#include <visp3/core/vpConfig.h>
65#include <visp3/detection/vpDetectorAprilTag.h>
66#include <visp3/gui/vpDisplayFactory.h>
67#include <visp3/gui/vpPlot.h>
68#include <visp3/io/vpImageIo.h>
69#include <visp3/robot/vpRobotAfma6.h>
70#include <visp3/sensor/vpRealSense2.h>
71#include <visp3/visual_features/vpFeatureThetaU.h>
72#include <visp3/visual_features/vpFeatureTranslation.h>
73#include <visp3/vs/vpServo.h>
74#include <visp3/vs/vpServoDisplay.h>
75#include <visp3/core/vpImageFilter.h>
76#include <visp3/io/vpVideoWriter.h>
79#if defined(VISP_HAVE_REALSENSE2) && ((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) && \
80 defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_AFMA6) && defined(VISP_HAVE_MODULE_DNN_TRACKER)
83#include <visp3/io/vpJsonArgumentParser.h>
84#include <visp3/dnn_tracker/vpMegaPoseTracker.h>
86#ifdef VISP_HAVE_NLOHMANN_JSON
87using json = nlohmann::json;
90#ifdef ENABLE_VISP_NAMESPACE
94std::optional<vpRect> detectObjectForInitMegaposeClick(
const vpImage<vpRGBa> &I)
100 if (startLabelling) {
121int main(
int argc,
const char *argv[])
123 bool opt_verbose =
true;
124 bool opt_plot =
true;
125 double convergence_threshold_t = 0.0005;
126 double convergence_threshold_tu = 0.5;
129 std::string megaposeAddress =
"127.0.0.1";
130 unsigned megaposePort = 5555;
131 int refinerIterations = 1, coarseNumSamples = 1024;
132 std::string objectName =
"";
134 std::string desiredPosFile =
"desired.pos";
135 std::string initialPosFile =
"init.pos";
137#ifdef VISP_HAVE_NLOHMANN_JSON
140 .addArgument(
"initialPose", initialPosFile,
true,
"Path to the file that contains that the desired pose. Can be acquired with Afma6_office.")
141 .addArgument(
"desiredPose", desiredPosFile,
true,
"Path to the file that contains that the desired pose. Can be acquired with Afma6_office.")
142 .addArgument(
"object", objectName,
true,
"Name of the object to track with megapose.")
143 .addArgument(
"megapose/address", megaposeAddress,
true,
"IP address of the Megapose server.")
144 .addArgument(
"megapose/port", megaposePort,
true,
"Port on which the Megapose server listens for connections.")
145 .addArgument(
"megapose/refinerIterations", refinerIterations,
false,
"Number of Megapose refiner model iterations."
146 "A higher count may lead to better accuracy, at the cost of more processing time")
147 .addArgument(
"megapose/initialisationNumSamples", coarseNumSamples,
false,
"Number of Megapose renderings used for the initial pose estimation.");
153#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
154 std::shared_ptr<vpDisplay> display;
159 std::cout <<
"WARNING: This example will move the robot! "
160 <<
"Please make sure to have the user stop button at hand!" << std::endl
161 <<
"Press Enter to continue..." << std::endl;
163 std::vector<vpColVector> velocities;
164 std::vector<vpPoseVector>
error;
173 robot.setPositioningVelocity(10.0);
174 robot.readPosFile(desiredPosFile, q);
177 std::cout <<
"Move to joint position: " << q.t() << std::endl;
183 config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, 30);
189 std::cout <<
"cam:\n" <<
cam << std::endl;
191 std::shared_ptr<vpMegaPose> megapose;
193 megapose = std::make_shared<vpMegaPose>(megaposeAddress, megaposePort, cam, height, width);
200 megapose->setCoarseNumSamples(coarseNumSamples);
201 const std::vector<std::string> allObjects = megapose->getObjectNames();
202 if (std::find(allObjects.begin(), allObjects.end(), objectName) == allObjects.end()) {
205 std::future<vpMegaPoseEstimate> trackerFuture;
209#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
215 std::optional<vpRect> detection;
219 detection = detectObjectForInitMegaposeClick(I);
226 robot.readPosFile(initialPosFile, q);
229 std::cout <<
"Move to joint position: " << q.t() << std::endl;
233 detection = std::nullopt;
237 detection = detectObjectForInitMegaposeClick(I);
240 auto est = megaposeTracker.init(I, *detection).get();
242 std::cout <<
"Estimate score = " << est.score << std::endl;
258 task.addFeature(t, td);
259 task.addFeature(tu, tud);
268 plotter =
new vpPlot(2,
static_cast<int>(250 * 2), 500,
static_cast<int>(I.getWidth()) + 80, 10,
269 "Real time curves plotter");
270 plotter->setTitle(0,
"Visual features error");
271 plotter->setTitle(1,
"Camera velocities");
274 plotter->setLegend(0, 0,
"error_feat_tx");
275 plotter->setLegend(0, 1,
"error_feat_ty");
276 plotter->setLegend(0, 2,
"error_feat_tz");
277 plotter->setLegend(0, 3,
"error_feat_theta_ux");
278 plotter->setLegend(0, 4,
"error_feat_theta_uy");
279 plotter->setLegend(0, 5,
"error_feat_theta_uz");
280 plotter->setLegend(1, 0,
"vc_x");
281 plotter->setLegend(1, 1,
"vc_y");
282 plotter->setLegend(1, 2,
"vc_z");
283 plotter->setLegend(1, 3,
"wc_x");
284 plotter->setLegend(1, 4,
"wc_y");
285 plotter->setLegend(1, 5,
"wc_z");
288 bool final_quit =
false;
289 bool has_converged =
false;
290 bool send_velocities =
false;
299 bool callMegapose =
true;
302 while (!has_converged && !final_quit) {
307 if (!callMegapose && trackerFuture.wait_for(std::chrono::milliseconds(0)) == std::future_status::ready) {
308 megaposeEstimate = trackerFuture.get();
310 cTo = megaposeEstimate.
cTo;
312 if (megaposeEstimate.
score < 0.2) {
314 std::cout <<
"Low confidence, exiting" << std::endl;
319 std::cout <<
"Calling megapose" << std::endl;
320 trackerFuture = megaposeTracker.track(I);
321 callMegapose =
false;
324 std::stringstream ss;
325 ss <<
"Left click to " << (send_velocities ?
"stop the robot" :
"servo the robot") <<
", right click to quit.";
332 v =
task.computeControlLaw();
333 velocities.push_back(v);
338 cTw = robot.get_fMc(q).inverse();
339 cdTc_true = cdTw *
cTw.inverse();
341 error.push_back(cdrc);
349 plotter->plot(1, iter_plot, v);
354 std::cout <<
"v: " <<
v.t() << std::endl;
359 double error_tr = sqrt(cd_t_c.
sumSquare());
363 double error_tr_true = sqrt(cd_t_c_true.
sumSquare());
367 ss <<
"Predicted error_t: " << error_tr <<
", True error_t:" << error_tr_true;
370 ss <<
"Predicted error_tu: " << error_tu <<
", True error_tu:" << error_tu_true;
374 std::cout <<
"error translation: " << error_tr <<
" ; error rotation: " << error_tu << std::endl;
376 if (error_tr < convergence_threshold_t && error_tu < convergence_threshold_tu) {
377 has_converged =
true;
378 std::cout <<
"Servo task has converged" << std::endl;
397 send_velocities = !send_velocities;
410 std::cout <<
"Stop the robot " << std::endl;
413#ifdef VISP_HAVE_NLOHMANN_JSON
416 {
"velocities", velocities},
419 std::ofstream jsonFile;
420 jsonFile.open(
"results.json");
421 jsonFile <<
j.dump(4);
425 if (opt_plot && plotter !=
nullptr) {
431 while (!final_quit) {
447 std::cout <<
"ViSP exception: " <<
e.what() << std::endl;
448 std::cout <<
"Stop the robot " << std::endl;
450#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
451 if (display !=
nullptr) {
457 catch (
const std::exception &e) {
458 std::cout <<
"ur_rtde exception: " <<
e.what() << std::endl;
459#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
460 if (display !=
nullptr) {
467#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
468 if (display !=
nullptr) {
477#if !defined(VISP_HAVE_REALSENSE2)
478 std::cout <<
"Install librealsense-2.x" << std::endl;
480#if !((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L)))
481 std::cout <<
"Build ViSP with c++17 or higher compiler flag (cmake -DUSE_CXX_STANDARD=17)." << std::endl;
483#if !defined(VISP_HAVE_AFMA6)
484 std::cout <<
"ViSP is not built with Afma-6 robot support..." << std::endl;
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
static const vpColor none
static const vpColor yellow
Class that defines generic functionalities for display.
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void getImage(const vpImage< unsigned char > &Is, vpImage< vpRGBa > &Id)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
@ badValue
Used to indicate that a value is not in the allowed range.
Class that defines a 3D visual feature from an axis/angle parametrization that represents the rotati...
Class that defines the translation visual feature .
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpThetaUVector getThetaUVector() const
VP_DEPRECATED void init()
vpHomogeneousMatrix inverse() const
vpTranslationVector getTranslationVector() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
Command line argument parsing with support for JSON files. If a JSON file is supplied,...
static double deg(double rad)
A simplified interface to track a single object with MegaPose. This tracker works asynchronously: A c...
This class enables real time drawing of 2D or 3D graphics. An instance of the class opens a window whi...
Implementation of a pose vector and operations on poses.
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
Defines a rectangle in the plane.
Control of Irisa's gantry robot named Afma6.
@ STATE_POSITION_CONTROL
Initialize the position controller.
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
Implementation of a rotation vector as axis-angle minimal representation.
Class that considers the case of a translation vector.
Class that enables to write easily a video file or a sequence of images.
void saveFrame(vpImage< vpRGBa > &I)
void setFileName(const std::string &filename)
void open(vpImage< vpRGBa > &I)
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()