#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_CATCH2)
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/rbt/vpRBTracker.h>
#include <visp3/rbt/vpRBSilhouetteMeTracker.h>
#include <visp3/rbt/vpRBSilhouetteCCDTracker.h>
#include <visp3/rbt/vpRBKltTracker.h>
#include <visp3/rbt/vpRBDenseDepthTracker.h>
#include <visp3/ar/vpPanda3DFrameworkManager.h>
#include "test_utils.h"
#if defined(VISP_HAVE_NLOHMANN_JSON)
#include VISP_NLOHMANN_JSON(json.hpp)
#endif
#define CATCH_CONFIG_RUNNER
#include <catch_amalgamated.hpp>
#ifdef ENABLE_VISP_NAMESPACE
#endif
// Wavefront OBJ source for a 0.1 m axis-aligned cube centered at the origin
// (vertices at +/-0.05 on each axis), with per-face normals and UVs.
// Written to disk by createObjFile() so tests have a real 3D model to load.
const std::string objCube =
"o Cube\n"
"v -0.050000 -0.050000 0.050000\n"
"v -0.050000 0.050000 0.050000\n"
"v -0.050000 -0.050000 -0.050000\n"
"v -0.050000 0.050000 -0.050000\n"
"v 0.050000 -0.050000 0.050000\n"
"v 0.050000 0.050000 0.050000\n"
"v 0.050000 -0.050000 -0.050000\n"
"v 0.050000 0.050000 -0.050000\n"
"vn -1.0000 -0.0000 -0.0000\n"
"vn -0.0000 -0.0000 -1.0000\n"
"vn 1.0000 -0.0000 -0.0000\n"
"vn -0.0000 -0.0000 1.0000\n"
"vn -0.0000 -1.0000 -0.0000\n"
"vn -0.0000 1.0000 -0.0000\n"
"vt 0.375000 0.000000\n"
"vt 0.375000 1.000000\n"
"vt 0.125000 0.750000\n"
"vt 0.625000 0.000000\n"
"vt 0.625000 1.000000\n"
"vt 0.875000 0.750000\n"
"vt 0.125000 0.500000\n"
"vt 0.375000 0.250000\n"
"vt 0.625000 0.250000\n"
"vt 0.875000 0.500000\n"
"vt 0.375000 0.750000\n"
"vt 0.625000 0.750000\n"
"vt 0.375000 0.500000\n"
"vt 0.625000 0.500000\n"
"s 0\n"
// Faces are triangulated: two triangles per cube side (12 faces total).
"f 2/4/1 3/8/1 1/1/1\n"
"f 4/9/2 7/13/2 3/8/2\n"
"f 8/14/3 5/11/3 7/13/3\n"
"f 6/12/4 1/2/4 5/11/4\n"
"f 7/13/5 1/3/5 3/7/5\n"
"f 4/10/6 6/12/6 8/14/6\n"
"f 2/4/1 4/9/1 3/8/1\n"
"f 4/9/2 8/14/2 7/13/2\n"
"f 8/14/3 6/12/3 5/11/3\n"
"f 6/12/4 2/5/4 1/2/4\n"
"f 7/13/5 5/11/5 1/3/5\n"
"f 4/10/6 2/6/6 6/12/6\n";
bool opt_no_display = false;
// Writes the objCube OBJ data to disk and returns the path it was written to.
// NOTE(review): `objFile` is not declared in this chunk — presumably a path
// variable defined on lines not visible here (e.g. a temp-dir global); confirm.
std::string createObjFile()
{
std::ofstream f(objFile);
f << objCube;
f.close();
return objFile;
}
// Exercises the configuration API of the silhouette moving-edge tracker:
// mask usage toggling, mask-confidence bounds, robust threshold, candidate
// count, convergence thresholds, and (when nlohmann::json is available)
// loading/validating a JSON configuration.
// NOTE(review): `tracker` and the JSON object `j` used below have no visible
// declarations in this chunk — their definitions were presumably lost during
// extraction; confirm against the original test file.
SCENARIO("Instantiating a silhouette me tracker", "[rbt]")
{
GIVEN("A base me tracker")
{
WHEN("Changing mask parameters")
{
THEN("Enabling mask is seen")
{
// Flipping the flag must be reflected by the getter.
bool useMaskDefault = tracker.shouldUseMask();
tracker.setShouldUseMask(!useMaskDefault);
REQUIRE(useMaskDefault != tracker.shouldUseMask());
}
THEN("Changing mask min confidence with a correct value is Ok")
{
// Valid range appears to be [0, 1]: both bounds and a midpoint accepted.
tracker.setMinimumMaskConfidence(0.0);
REQUIRE(
tracker.getMinimumMaskConfidence() == 0.0);
tracker.setMinimumMaskConfidence(1.0);
REQUIRE(
tracker.getMinimumMaskConfidence() == 1.0);
tracker.setMinimumMaskConfidence(0.5);
REQUIRE(
tracker.getMinimumMaskConfidence() == 0.5);
}
THEN("Setting incorrect mask confidence value fails")
{
// Out-of-range confidence must throw.
REQUIRE_THROWS(
tracker.setMinimumMaskConfidence(-1.0));
}
}
WHEN("Changing robust threshold")
{
THEN("Setting correct value works")
{
tracker.setMinRobustThreshold(0.5);
REQUIRE(
tracker.getMinRobustThreshold() == 0.5);
}
THEN("Setting negative value throws")
{
REQUIRE_THROWS(
tracker.setMinRobustThreshold(-0.5));
}
}
WHEN("Changing number of candidates")
{
THEN("Setting correct value works")
{
// NOTE(review): the setter call that would make this 3 is not visible
// in this chunk — presumably on a line lost in extraction; confirm.
REQUIRE(
tracker.getNumCandidates() == 3);
}
THEN("Setting incorrect value throws")
{
// Zero candidates is rejected.
REQUIRE_THROWS(
tracker.setNumCandidates(0));
}
}
WHEN("Changing convergence settings")
{
THEN("Setting correct single point value works")
{
tracker.setSinglePointConvergenceThreshold(1.0);
REQUIRE(
tracker.getSinglePointConvergenceThreshold() == 1.0);
}
THEN("Setting incorrect single point value throws")
{
REQUIRE_THROWS(
tracker.setSinglePointConvergenceThreshold(-1.0));
}
THEN("Setting correct global value works")
{
// Global convergence ratio accepts the full [0, 1] range.
tracker.setGlobalConvergenceMinimumRatio(0.0);
REQUIRE(
tracker.getGlobalConvergenceMinimumRatio() == 0.0);
tracker.setGlobalConvergenceMinimumRatio(1.0);
REQUIRE(
tracker.getGlobalConvergenceMinimumRatio() == 1.0);
tracker.setGlobalConvergenceMinimumRatio(0.5);
REQUIRE(
tracker.getGlobalConvergenceMinimumRatio() == 0.5);
}
}
#if defined(VISP_HAVE_NLOHMANN_JSON)
WHEN("defining JSON parameters")
{
// NOTE(review): this initializer list is missing its opener (likely
// `nlohmann::json j = {`) — lost in extraction; confirm.
{"type", "silhouetteMe"},
{ "numCandidates", 1 },
{ "weight", 0.5 },
{ "convergencePixelThreshold", 0.5 },
{ "convergenceRatio", 0.99},
{ "useMask", true},
{ "minMaskConfidence", 0.5},
{ "movingEdge", {
{"maskSign", 0},
{"maskSize" , 5},
{"minSampleStep" , 4.0},
{"mu" , {0.5, 0.5}},
{"nMask" , 90},
{"ntotalSample" , 0},
{"pointsToTrack" , 200},
{"range" , 5},
{"sampleStep" , 4.0},
{"strip" , 2},
{"thresholdType" , "normalized"},
{"threshold" , 20.0}
}}
};
THEN("Loading correct settings works")
{
// After loadJsonConfiguration (call presumably on a dropped line), the
// getters must reflect the JSON values above.
REQUIRE(
tracker.getNumCandidates() == 1);
REQUIRE(
tracker.shouldUseMask() ==
true);
REQUIRE(
tracker.getMinimumMaskConfidence() == 0.5);
REQUIRE(
tracker.getMe().getMaskNumber() == 90);
REQUIRE(
tracker.getMe().getThreshold() == 20.0);
}
THEN("Setting incorrect candidate number throws")
{
// NOTE(review): the mutation of j that makes it invalid is not visible
// here — presumably lost in extraction; confirm.
REQUIRE_THROWS(
tracker.loadJsonConfiguration(j));
}
THEN("Setting incorrect mask confidence throws")
{
// 5.0 is outside the valid [0, 1] confidence range.
j[
"minMaskConfidence"] = 5.0;
REQUIRE_THROWS(
tracker.loadJsonConfiguration(j));
}
THEN("Setting incorrect single point convergence vlaue confidence throws")
{
j[
"convergencePixelThreshold"] = -1.0;
REQUIRE_THROWS(
tracker.loadJsonConfiguration(j));
}
THEN("Setting incorrect global convergence vlaue confidence throws")
{
// Ratio above 1 is rejected.
j[
"convergenceRatio"] = 2.0;
REQUIRE_THROWS(
tracker.loadJsonConfiguration(j));
}
}
#endif
}
}
// Exercises the silhouette CCD tracker's configuration API: temporal
// smoothing factor bounds, CCD parameter propagation, and JSON loading.
// NOTE(review): `tracker`, `ccd`, and `j` are used without visible
// declarations in this chunk — presumably lost in extraction; confirm.
SCENARIO("Instantiating a silhouette CCD tracker", "[rbt]")
{
GIVEN("A base ccd tracker")
{
WHEN("Setting smoothing factor")
{
THEN("Setting value above 0 works")
{
tracker.setTemporalSmoothingFactor(0.5);
REQUIRE(
tracker.getTemporalSmoothingFactor() == 0.5);
}
THEN("Setting value below 0 throws")
{
REQUIRE_THROWS(
tracker.setTemporalSmoothingFactor(-2.0));
}
}
WHEN("Updating CCD parameters")
{
THEN("Changes are propagated to tracker")
{
// The tracker's stored CCD parameters must mirror the local `ccd`
// struct (the setCCDParameters call is presumably on a dropped line).
REQUIRE(
tracker.getCCDParameters().h == ccd.
h);
}
}
#if defined(VISP_HAVE_NLOHMANN_JSON)
WHEN("Defining associated json")
{
// NOTE(review): missing opener (likely `nlohmann::json j = {`) — lost
// in extraction; confirm.
{"type", "silhouetteCCD"},
{"weight", 0.01},
{"temporalSmoothing", 0.1},
{"convergenceThreshold", 0.1},
{"ccd", {
{"h", 64},
{"delta_h", 16},
{"gamma", { 0.1, 0.2, 0.3, 0.4 } }
}}
};
THEN("Loading correct json works")
{
REQUIRE(
tracker.getTemporalSmoothingFactor() == 0.1);
}
THEN("Loading invalid temporal smoothing factor throws")
{
j[
"temporalSmoothing"] = -3.14;
REQUIRE_THROWS(
tracker.loadJsonConfiguration(j));
}
THEN("Loading invalid ccd gamma throws")
{
// gamma must be an array of 4 values; a scalar is rejected.
j[
"ccd"][
"gamma"] = -3.14;
REQUIRE_THROWS(
tracker.loadJsonConfiguration(j));
}
}
#endif
}
}
#if defined(VP_HAVE_RB_KLT_TRACKER)
// Exercises the KLT feature tracker's configuration API (only compiled when
// VP_HAVE_RB_KLT_TRACKER is defined): basic setters/getters, mask-confidence
// validation, and JSON loading including the wrapped vpKltOpencv settings.
// NOTE(review): `tracker` and `j` have no visible declarations in this
// chunk — presumably lost in extraction; confirm.
SCENARIO("Instantiating KLT tracker")
{
WHEN("Modifying basic settings")
{
tracker.setFilteringMaxReprojectionError(0.024);
tracker.setMinimumDistanceNewPoints(0.005);
tracker.setMinimumNumberOfPoints(20);
tracker.setMinimumMaskConfidence(0.5);
THEN("Every change is visible")
{
// Border size 2 is presumably set on a line dropped from this view.
REQUIRE(
tracker.getFilteringBorderSize() == 2);
REQUIRE(
tracker.getFilteringMaxReprojectionError() == 0.024);
REQUIRE(
tracker.getMinimumDistanceNewPoints() == 0.005);
REQUIRE(
tracker.getMinimumNumberOfPoints() == 20);
REQUIRE(
tracker.getMinimumMaskConfidence() == 0.5);
}
THEN("Setting incorrect Mask confidence throws")
{
REQUIRE_THROWS(
tracker.setMinimumMaskConfidence(-1.0));
}
}
#if defined(VISP_HAVE_NLOHMANN_JSON)
WHEN("Defining associated json")
{
// NOTE(review): missing opener (likely `nlohmann::json j = {`) — lost
// in extraction; confirm.
{"type", "klt"},
{"weight", 0.01},
{"minimumNumPoints", 25},
{"newPointsMinPixelDistance", 5},
{"maxReprojectionErrorPixels", 0.01},
{"useMask", true},
{"minMaskConfidence", 0.1},
{ "windowSize", 7 },
{ "quality", 0.01 },
{ "maxFeatures", 500 }
};
THEN("Loading correct json works")
{
// Values must round-trip from JSON into both the RB wrapper and the
// underlying KLT tracker object.
REQUIRE(
tracker.getMinimumNumberOfPoints() == 25);
REQUIRE(
tracker.getMinimumDistanceNewPoints() == 5);
REQUIRE(
tracker.getFilteringMaxReprojectionError() == 0.01);
REQUIRE(
tracker.shouldUseMask() ==
true);
REQUIRE(
tracker.getMinimumMaskConfidence() == 0.1f);
REQUIRE(
tracker.getKltTracker().getWindowSize() == 7);
REQUIRE(
tracker.getKltTracker().getQuality() == 0.01);
REQUIRE(
tracker.getKltTracker().getMaxFeatures() == 500);
}
THEN("Loading invalid mask confidence throws")
{
j[
"minMaskConfidence"] = -3.14;
REQUIRE_THROWS(
tracker.loadJsonConfiguration(j));
}
}
#endif
}
#endif
// Exercises the dense depth tracker's configuration API: step validation,
// mask-confidence bounds, and JSON loading.
// NOTE(review): `tracker` and `j` have no visible declarations in this
// chunk — presumably lost in extraction; confirm.
SCENARIO("Instantiating depth tracker", "[rbt]")
{
WHEN("Setting steps")
{
THEN("Setting positive value works")
{
// NOTE(review): body is empty here — the setStep/getStep assertions
// were presumably lost in extraction; confirm.
}
THEN("Setting 0 step is invalid")
{
REQUIRE_THROWS(
tracker.setStep(0));
}
}
WHEN("Setting confidence")
{
THEN("Setting incorrect mask confidence value")
{
REQUIRE_THROWS(
tracker.setMinimumMaskConfidence(-1.0));
}
THEN("Setting correct mask confidence value")
{
tracker.setMinimumMaskConfidence(0.8);
REQUIRE(
tracker.getMinimumMaskConfidence() == 0.8f);
}
THEN("Toggling mask works")
{
// NOTE(review): body is empty here — the shouldUseMask toggle
// assertions were presumably lost in extraction; confirm.
}
}
#if defined(VISP_HAVE_NLOHMANN_JSON)
WHEN("Defining associated json")
{
// NOTE(review): missing opener (likely `nlohmann::json j = {`).
// Also, "type": "klt" in a depth-tracker JSON looks like a copy-paste
// slip (expected "depth"?) — confirm whether loadJsonConfiguration
// ignores the type field.
{"type", "klt"},
{"weight", 0.01},
{"step", 16},
{"useMask", true},
{"minMaskConfidence", 0.1}
};
THEN("Loading correct json works")
{
REQUIRE(
tracker.getMinimumMaskConfidence() == 0.1f);
}
THEN("Loading invalid mask confidence throws")
{
j[
"minMaskConfidence"] = -3.14;
REQUIRE_THROWS(
tracker.loadJsonConfiguration(j));
}
THEN("Loading invalid step throws")
{
// NOTE(review): the mutation of j["step"] that makes it invalid is not
// visible here — presumably lost in extraction; confirm.
REQUIRE_THROWS(
tracker.loadJsonConfiguration(j));
}
}
#endif
}
// Exercises the top-level vpRBTracker: VVS optimization parameter bounds
// (iterations, gain, Levenberg-Marquardt mu and its growth factor), camera
// parameter/resolution validation, full JSON configuration loading, and
// feature-tracker registration.
// NOTE(review): `tracker` and `cam` have no visible declarations in this
// chunk — presumably lost in extraction; confirm.
SCENARIO("Instantiating a render-based tracker", "[rbt]")
{
WHEN("Setting optimization parameters")
{
THEN("Max num iter cannot be zero")
{
REQUIRE_THROWS(
tracker.setMaxOptimizationIters(0));
}
THEN("Setting num iter is ok")
{
tracker.setMaxOptimizationIters(10);
REQUIRE(
tracker.getMaxOptimizationIters() == 10);
}
THEN("Gain cannot be negative")
{
REQUIRE_THROWS(
tracker.setOptimizationGain(-0.5));
}
THEN("Positive gain is ok")
{
// NOTE(review): the setOptimizationGain(0.5) call is not visible in
// this chunk — presumably lost in extraction; confirm.
REQUIRE(
tracker.getOptimizationGain() == 0.5);
}
THEN("Initial mu cannot be negative")
{
REQUIRE_THROWS(
tracker.setOptimizationInitialMu(-0.5));
}
THEN("Initial mu can be zero (gauss newton)")
{
// mu == 0 degenerates Levenberg-Marquardt to Gauss-Newton.
tracker.setOptimizationInitialMu(0.0);
REQUIRE(
tracker.getOptimizationInitialMu() == 0.0);
}
THEN("Initial mu can be above zero")
{
tracker.setOptimizationInitialMu(0.1);
REQUIRE(
tracker.getOptimizationInitialMu() == 0.1);
}
THEN("Mu factor cannot be negative")
{
REQUIRE_THROWS(
tracker.setOptimizationMuIterFactor(-0.5));
}
THEN("Mu factor can be zero")
{
tracker.setOptimizationMuIterFactor(0.0);
REQUIRE(
tracker.getOptimizationMuIterFactor() == 0.0);
}
THEN("Mu factor can be positive")
{
tracker.setOptimizationMuIterFactor(0.1);
REQUIRE(
tracker.getOptimizationMuIterFactor() == 0.1);
}
}
WHEN("Setting camera parameters and resolution")
{
unsigned int h = 480,
w = 640;
THEN("Image height cannot be zero")
{
REQUIRE_THROWS(
tracker.setCameraParameters(cam, 0, w));
}
THEN("Image width cannot be zero")
{
REQUIRE_THROWS(
tracker.setCameraParameters(cam, h, 0));
}
THEN("Camera model cannot have distortion")
{
// Renderer requires the distortion-free perspective model.
cam.initPersProjWithDistortion(600, 600, 320, 240, 0.01, 0.01);
REQUIRE_THROWS(
tracker.setCameraParameters(cam, h, w));
}
THEN("Loading with perspective model with no distortion and correct resolution is ok")
{
tracker.setCameraParameters(cam, h, w);
REQUIRE(
tracker.getCameraParameters() == cam);
REQUIRE(
tracker.getImageHeight() == h);
REQUIRE(
tracker.getImageWidth() == w);
}
}
#if defined(VISP_HAVE_NLOHMANN_JSON)
WHEN("Loading JSON configuration")
{
// Full tracker configuration: camera intrinsics/resolution, VVS settings,
// model path, silhouette extraction settings, and two feature trackers.
const std::string jsonLiteral = R"JSON({
"camera": {
"intrinsics": {
"model": "perspectiveWithoutDistortion",
"px" : 302.573,
"py" : 302.396,
"u0" : 162.776,
"v0" : 122.475
},
"height": 240,
"width" : 320
},
"vvs": {
"gain": 1.0,
"maxIterations" : 10,
"mu": 0.5,
"muIterFactor": 0.1
},
"model" : "path/to/model.obj",
"silhouetteExtractionSettings" : {
"threshold": {
"type": "relative",
"value" : 0.1
},
"sampling" : {
"type": "fixed",
"samplingRate": 2,
"numPoints" : 128,
"reusePreviousPoints": true
}
},
"features": [
{
"type": "silhouetteMe",
"weight" : 0.5,
"numCandidates" : 3,
"convergencePixelThreshold" : 3,
"convergenceRatio" : 0.99,
"movingEdge" : {
"maskSign": 0,
"maskSize" : 5,
"minSampleStep" : 4.0,
"mu" : [
0.5,
0.5
] ,
"nMask" : 90,
"ntotalSample" : 0,
"pointsToTrack" : 200,
"range" : 5,
"sampleStep" : 4.0,
"strip" : 2,
"thresholdType" : "normalized",
"threshold" : 20.0
}
},
{
"type": "silhouetteColor",
"weight" : 0.5,
"convergenceThreshold" : 0.1,
"temporalSmoothing" : 0.1,
"ccd" : {
"h": 4,
"delta_h" : 1
}
}
],
"verbose": {
"enabled": true
}
})JSON";
// Shared assertions: camera resolution and VVS values must match the
// "camera"/"vvs" sections of the JSON above.
const auto verifyBase = [&
tracker]() {
REQUIRE((
tracker.getImageHeight() == 240 &&
tracker.getImageWidth() == 320));
REQUIRE((
tracker.getOptimizationGain() == 1.0 &&
tracker.getMaxOptimizationIters() == 10));
REQUIRE((
tracker.getOptimizationGain() == 1.0 &&
tracker.getMaxOptimizationIters() == 10));
REQUIRE((
tracker.getOptimizationInitialMu() == 0.5 &&
tracker.getOptimizationMuIterFactor() == 0.1));
};
nlohmann::json
j = nlohmann::json::parse(jsonLiteral);
THEN("Loading configuration with trackers")
{
verifyBase();
REQUIRE(
tracker.getModelPath() ==
"path/to/model.obj");
if (!opt_no_display) {
AND_THEN("Initializing tracking fails since object does not exist")
{
// "path/to/model.obj" does not exist on disk.
REQUIRE_THROWS(
tracker.startTracking());
}
}
}
THEN("Loading configuration without model also works")
{
// NOTE(review): the j.erase("model")/loadConfiguration call making the
// path empty is not visible here — presumably lost in extraction.
verifyBase();
REQUIRE(
tracker.getModelPath() ==
"");
if (!opt_no_display) {
AND_THEN("Initializing tracking fails since path is not specified")
{
REQUIRE_THROWS(
tracker.startTracking());
}
}
}
THEN("Loading configuration with real 3D model also works")
{
std::string objFile = createObjFile();
verifyBase();
REQUIRE(
tracker.getModelPath() == objFile);
if (!opt_no_display) {
AND_THEN("Initializing tracker works")
{
// With a real OBJ file on disk, startTracking must succeed.
REQUIRE_NOTHROW(
tracker.startTracking());
}
}
}
}
WHEN("Adding trackers")
{
THEN("Adding nullptr is not allowed")
{
REQUIRE_THROWS(
tracker.addTracker(
nullptr));
}
THEN("Adding a tracker works")
{
// NOTE(review): the addTracker(ccdTracker) call and its assertion are
// not visible here — presumably lost in extraction; confirm.
auto ccdTracker = std::make_shared<vpRBSilhouetteCCDTracker>();
}
}
#endif
}
// End-to-end test: renders a synthetic sequence of a moving cube, runs the
// RB tracker on it, and checks per-frame translation/rotation errors against
// fixed tolerances (5 mm / 2.1 deg).
// NOTE(review): this scenario is heavily truncated by extraction — `renderer`,
// `tracker`, `cam`, `ccdParams`, `silhouetteSettings`, `errorT`, `errorR`,
// `renderParams`, `setupScene` and the tracking loop body are missing, and the
// brace count is visibly inconsistent; confirm against the original file.
SCENARIO("Running tracker on static synthetic sequences", "[rbt]")
{
if (opt_no_display) {
std::cout << "Display is disabled for tests, skipping..." << std::endl;
}
else {
unsigned int h = 480,
w = 640;
std::string objFile = createObjFile();
renderer.addNodeToScene(renderer.loadObject("object", objFile));
};
const unsigned int n = 100;
std::vector<vpHomogeneousMatrix>
cTw;
std::vector<vpHomogeneousMatrix> oTw;
// Build a camera trajectory that translates 1 mm per frame in y and z,
// starting 0.3 m from the world origin.
for (
unsigned int i = 0;
i < n; ++
i) {
cTw.push_back(
vpHomogeneousMatrix(0.0, 0.001 *
static_cast<double>(i), 0.3 + 0.001 *
static_cast<double>(i), 0.0, 0.0, 0.0));
}
TrajectoryData traj1 = generateTrajectory(renderParams, setupScene, cTw, oTw);
tracker.setCameraParameters(cam, h, w);
std::shared_ptr<vpRBSilhouetteCCDTracker> silTracker = std::make_shared<vpRBSilhouetteCCDTracker>();
silTracker->setTemporalSmoothingFactor(0.1);
silTracker->setCCDParameters(ccdParams);
tracker.setSilhouetteExtractionParameters(silhouetteSettings);
tracker.setMaxOptimizationIters(10);
tracker.setOptimizationInitialMu(0.01);
// Track every frame of the generated trajectory and check pose error.
for (
unsigned int i = 0;
i < traj1.
cTo.size(); ++
i) {
std::cout <<
"Translation error = " << errorT <<
" m" <<
", rotation error = " <<
vpMath::deg(errorR) <<
" deg" << std::endl;
REQUIRE((errorT < 0.005 && errorR <
vpMath::deg(2.1)));
}
}
}
// Tests the ADD (Average Distance of model points) convergence metric:
// computing it before sampling must fail, and after sampling the cube model
// the metric values for three pose pairs must match expected magnitudes and
// drive the hasConverged / shouldUpdateRender decisions.
// NOTE(review): `metric`, `cam`, `cTo1`, `cTo2NotConv`, `cTo2Conv`,
// `cTo2Render` and the `renderer` declaration are not visible in this
// chunk — presumably lost in extraction; confirm.
SCENARIO("Checking ADD convergence metric", "[rbt]")
{
if (opt_no_display) {
std::cout << "Display is disabled for tests, skipping..." << std::endl;
return;
}
GIVEN("A renderer and a convergence metric")
{
renderer.initFramework();
renderer.addObjectToScene("obj", createObjFile());
renderer.setFocusedObject("obj");
THEN("Trying to compute metric without sampling fails")
{
// The metric needs sampled model points before it can be evaluated.
REQUIRE_THROWS(metric(cam, cTo1, cTo2NotConv));
}
THEN("Sampling and testing against various threshold works")
{
metric.sampleObject(renderer);
// ~5 mm ADD: not converged, but not far enough to force a re-render.
double metricValue = metric(cam, cTo1, cTo2NotConv);
REQUIRE(fabs(metricValue - 0.005) < 1e-4);
REQUIRE(!metric.hasConverged(cam, cTo1, cTo2NotConv));
REQUIRE(!metric.shouldUpdateRender(cam, cTo1, cTo2NotConv));
// ~0.1 mm ADD: converged, no re-render needed.
metricValue = metric(cam, cTo1, cTo2Conv);
REQUIRE(fabs(metricValue - 0.0001) < 1e-4);
REQUIRE(metric.hasConverged(cam, cTo1, cTo2Conv));
REQUIRE(!metric.shouldUpdateRender(cam, cTo1, cTo2Conv));
// ~20 mm ADD: not converged and large enough to require a re-render.
metricValue = metric(cam, cTo1, cTo2Render);
REQUIRE(fabs(metricValue - 0.02) < 1e-4);
REQUIRE(!metric.hasConverged(cam, cTo1, cTo2Render));
REQUIRE(metric.shouldUpdateRender(cam, cTo1, cTo2Render));
}
}
}
// Tests the point-map data structure: default thresholds, adding N synthetic
// points backed by a synthetic depth image, reprojection consistency, and
// visibility filtering under depth-error and surface-normal criteria.
// NOTE(review): `map`, `random`, `baseUV`, `baseXY`, `depthImage`, `cX`, `cN`,
// `c`, `ox`, `oX`, `oN`, `cTo`, `indicesToRemove`, `allPoints`, `reprojcX`,
// `reprojXY`, `reprojUV`, `cam` are used without visible declarations —
// presumably lost in extraction; confirm against the original file.
SCENARIO("Testing point map", "[rbt]")
{
map.setThresholdNormalVisibiltyCriterion(45.0);
// Defaults (presumably from the map's constructor on a dropped line).
REQUIRE(map.getNumMaxPoints() == 512);
REQUIRE(
vpMath::equal(map.getMinDistanceAddNewPoints(), 0.0, 1e-6));
REQUIRE(
vpMath::equal(map.getOutlierReprojectionErrorThreshold(), 2.0, 1e-6));
REQUIRE(
vpMath::equal(map.getMaxDepthErrorCandidate(), 0.02, 1e-6));
REQUIRE(
vpMath::equal(map.getThresholdNormalVisibiltyCriterion(), 45.0, 1e-6));
REQUIRE(map.getPoints().getRows() == 0);
unsigned int h = 480,
w = 640;
std::vector<int> removedIndices;
unsigned int N = 100;
// Generate N random points in the central fifth of the image, each with a
// depth around 0.5 m, writing a 3x3 depth patch so each point is backed by
// valid depth. Rejection-sample until the pixel is not already occupied.
for (
unsigned int i = 0;
i < N;
i++) {
bool good = false;
double Z;
while (!good) {
baseUV[
i][0] = random.uniform(
static_cast<double>(w) / 5 * 2,
static_cast<double>(w) / 5 * 3);
baseUV[
i][1] = random.uniform(
static_cast<double>(h) / 5 * 2,
static_cast<double>(h) / 5 * 3);
unsigned uu =
static_cast<unsigned int>(baseUV[
i][0]), vu =
static_cast<unsigned int>(baseUV[i][1]);
if (depthImage[vu][uu] > 0.0) {
good = false;
}
else {
Z = 0.5 + random.uniform(-0.05, 0.05);
// NOTE(review): this inner `i` shadows the outer point index — the
// 3x3 patch fill relies on the shadowed variable; confirm intended.
for (
int i = -1;
i < 2; ++
i) {
for (
int j = -1;
j < 2; ++
j) {
depthImage[vu +
i][uu +
j] = Z;
}
}
good = true;
}
}
// Back-project pixel to camera-frame 3D point: X = x*Z, Y = y*Z.
cX[
i][0] = baseXY[
i][0] * Z, cX[
i][1] = baseXY[
i][1] * Z, cX[
i][2] = Z;
// All synthetic normals face the camera (-Z).
cN[
i][0] = 0.0, cN[
i][1] = 0.0, cN[
i][2] = -1.0;
c[0] = cX[
i][0], c[1] = cX[
i][1], c[2] = cX[
i][2];
// Transform point and normal into the object frame (ox presumably
// computed from cTo.inverse() on a dropped line).
oX[
i][0] = ox[0] / ox[3], oX[
i][1] = ox[1] / ox[3], oX[
i][2] = ox[2] / ox[3];
const vpColVector on = cTo.inverse().getRotationMatrix() * cN.getRow(i).
t();
oN[
i][0] = on[0], oN[
i][1] = on[1], oN[
i][2] = on[2];
}
std::cout << oN << std::endl;
unsigned int numAddedPoints;
map.updatePoints(indicesToRemove, oX, oN, removedIndices, numAddedPoints);
REQUIRE(numAddedPoints == N);
// NOTE(review): empty loop body — its per-point assertions were presumably
// lost in extraction; confirm.
for (
unsigned int i = 0;
i < N; ++
i) {
}
// Reprojecting the stored points must reproduce the original pixel, image
// plane, and camera-frame coordinates (within numerical tolerance).
map.project(cam, allPoints, cTo, reprojcX, reprojXY, reprojUV);
REQUIRE(((baseUV - reprojUV).frobeniusNorm() / (N * 2)) < 1e-3);
REQUIRE(((baseXY - reprojXY).frobeniusNorm() / (N * 2)) < 1e-3);
REQUIRE(((cX - reprojcX).frobeniusNorm() / (N * 3)) < 1e-3);
std::vector<int> visibleIndices;
// At the original pose all points are visible.
map.getVisiblePoints(h, w, cam, cTo, depthImage, visibleIndices);
REQUIRE(visibleIndices.size() == N);
// Pushing the object beyond the depth-error criterion hides every point.
map.getVisiblePoints(h, w, cam, cTo *
vpHomogeneousMatrix(0.0, 0.0, map.getMaxDepthErrorVisibilityCriterion() + 0.01, 0.0, 0.0, 0.0), depthImage, visibleIndices);
REQUIRE(visibleIndices.size() == 0);
// A shift within the criterion keeps all points visible.
map.getVisiblePoints(h, w, cam,
vpHomogeneousMatrix(0.0, 0.0, map.getMaxDepthErrorVisibilityCriterion() * 0.9, 0.0, 0.0, 0.0) * cTo, depthImage, visibleIndices);
REQUIRE(visibleIndices.size() == N);
// Relax the depth criterion so only the normal criterion is tested below.
map.setMaxDepthErrorVisibilityCriterion(10);
// Rotating past the normal-angle threshold hides every point...
map.getVisiblePoints(h, w, cam, cTo *
vpHomogeneousMatrix(0.0, 0.0, 0.0, 0.0,
vpMath::rad(map.getThresholdNormalVisibiltyCriterion() + 10), 0.0), depthImage, visibleIndices);
REQUIRE(visibleIndices.size() == 0);
std::cout << map.getThresholdNormalVisibiltyCriterion() << std::endl;
// ...while staying under it keeps them visible.
map.getVisiblePoints(h, w, cam, cTo *
vpHomogeneousMatrix(0.0, 0.0, 0.0, 0.0,
vpMath::rad(map.getThresholdNormalVisibiltyCriterion() - 10), 0.0), depthImage, visibleIndices);
REQUIRE(visibleIndices.size() == N);
// With a 180-degree threshold the normal criterion never rejects points.
map.setThresholdNormalVisibiltyCriterion(180);
map.getVisiblePoints(h, w, cam, cTo *
vpHomogeneousMatrix(0.0, 0.0, 0.0, 0.0,
vpMath::rad(map.getThresholdNormalVisibiltyCriterion() + 10), 0.0), depthImage, visibleIndices);
REQUIRE(visibleIndices.size() == N);
}
// Test entry point: extends the Catch2 command line with a custom
// `--no-display` flag (bound to the opt_no_display global), then applies the
// command line and runs the session. Returns a non-zero code on CLI errors
// or on any failed assertion.
int main(int argc, char *argv[])
{
  Catch::Session session;
  // Chain our display toggle onto Catch2's default CLI parser.
  session.cli(session.cli()
              | Catch::Clara::Opt(opt_no_display)["--no-display"]("Disable display"));
  const int applyStatus = session.applyCommandLine(argc, argv);
  if (applyStatus != 0) {
    // Parsing failed (or --help was shown): propagate Catch2's exit code.
    return applyStatus;
  }
  // The number of failed test assertions becomes the process exit code.
  return session.run();
}
#else
// Fallback entry point when Catch2 is unavailable: the test is a no-op
// and reports success so the build/test pipeline is not broken.
int main()
{
return EXIT_SUCCESS;
}
#endif
Implementation of a generic 2D array used as base class for matrices and vectors.
double gamma_2
Curve uncertainty computation hyperparameter. Recommended to leave fixed.
double gamma_3
Curve uncertainty computation hyperparameter. Recommended to leave fixed.
double gamma_1
Curve uncertainty computation hyperparameter. Recommended to leave fixed.
int delta_h
Sample step when computing statistics and errors. Increase this value to decrease computation time,...
int h
Size of the vicinity that is used to compute statistics and error. Length of the line along the norma...
double gamma_4
Curve uncertainty computation hyperparameter. Recommended to leave fixed.
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpThetaUVector getThetaUVector() const
vpTranslationVector getTranslationVector() const
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Definition of the vpImage class member functions.
static double rad(double deg)
static bool equal(double x, double y, double threshold=0.001)
static double deg(double rad)
Implementation of a matrix and operations on matrices.
Single object focused renderer.
Class representing an ambient light.
static vpPanda3DFrameworkManager & getInstance()
Rendering parameters for a panda3D simulation.
Class that renders multiple datatypes, in a single pass. A renderer set contains multiple subrenderer...
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
A tracker based on dense depth point-plane alignment.
Tracking based on the Contracting Curve Density algorithm.
Moving edge feature tracking from depth-extracted object contours.
Class implementing the Render-Based Tracker (RBT).
double frobeniusNorm() const
Class for generating random numbers with uniform probability density.
std::vector< vpHomogeneousMatrix > cTo
std::vector< vpImage< float > > depth
std::vector< vpImage< vpRGBa > > rgb