204 EDGE_TRACKER = 1 << 0,
205#if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO))
206 KLT_TRACKER = 1 << 1,
208 DEPTH_NORMAL_TRACKER = 1 << 2,
209 DEPTH_DENSE_TRACKER = 1 << 3
// Multi-camera constructor: creates one sub-tracker per entry in cameraNames,
// each configured with the feature types given by the matching entry of
// trackerTypes (a bitmask of vpTrackerType values, e.g. EDGE_TRACKER | DEPTH_DENSE_TRACKER).
// NOTE(review): the two vectors are presumably required to have the same size —
// confirm against the implementation.
215 vpMbGenericTracker(
    const std::vector<std::string> &cameraNames,
    const std::vector<int> &trackerTypes);
225 const vpColor &col,
unsigned int thickness = 1,
bool displayFullModel =
false);
227 const vpColor &col,
unsigned int thickness = 1,
bool displayFullModel =
false);
232 bool displayFullModel =
false);
235 const vpColor &color,
unsigned int thickness = 1,
bool displayFullModel =
false);
238 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
239 const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
const vpColor &col,
240 unsigned int thickness = 1,
bool displayFullModel =
false);
242 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
243 const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
const vpColor &col,
244 unsigned int thickness = 1,
bool displayFullModel =
false);
// Names of all cameras managed by this multi-view tracker.
246 virtual std::vector<std::string> getCameraNames()
    const;
// Fill mapOfCameraParameters with each camera's intrinsic parameters,
// keyed by camera name (the map-of-<camera-name> convention used throughout this class).
251 virtual void getCameraParameters(std::map<std::string, vpCameraParameters> &mapOfCameraParameters)
    const;
// Per-camera feature-type bitmask (combination of vpTrackerType flags),
// keyed by camera name.
253 virtual std::map<std::string, int> getCameraTrackerTypes()
    const;
// Two-camera convenience overload: retrieve the polygon clipping flags of the
// first and second cameras into clippingFlag1 and clippingFlag2.
256 virtual void getClipping(
    unsigned int &clippingFlag1,
    unsigned int &clippingFlag2)
    const;
// Retrieve the clipping flags of every camera, keyed by camera name.
257 virtual void getClipping(std::map<std::string, unsigned int> &mapOfClippingFlags)
    const;
264#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
265 virtual std::list<vpMbtDistanceCircle *> &getFeaturesCircle();
266 virtual std::list<vpMbtDistanceKltCylinder *> &getFeaturesKltCylinder();
267 virtual std::list<vpMbtDistanceKltPoints *> &getFeaturesKlt();
270 virtual std::vector<std::vector<double> > getFeaturesForDisplay();
271 virtual void getFeaturesForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfFeatures);
// Threshold on the ratio of well-tracked moving-edge points; presumably used
// to decide when edge tracking is unreliable — confirm in the implementation.
273 virtual double getGoodMovingEdgesRatioThreshold()
    const;
275#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
276 virtual std::vector<vpImagePoint> getKltImagePoints()
const;
277 virtual std::map<int, vpImagePoint> getKltImagePointsWithId()
const;
279 virtual unsigned int getKltMaskBorder()
const;
280 virtual int getKltNbPoints()
const;
284 virtual void getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts)
const;
286#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
287 virtual std::vector<cv::Point2f> getKltPoints()
const;
290 virtual double getKltThresholdAcceptation()
const;
293 virtual void getLcircle(std::list<vpMbtDistanceCircle *> &circlesList,
unsigned int level = 0)
const;
294 virtual void getLcircle(
const std::string &cameraName, std::list<vpMbtDistanceCircle *> &circlesList,
295 unsigned int level = 0)
const;
296 virtual void getLcylinder(std::list<vpMbtDistanceCylinder *> &cylindersList,
unsigned int level = 0)
const;
297 virtual void getLcylinder(
const std::string &cameraName, std::list<vpMbtDistanceCylinder *> &cylindersList,
298 unsigned int level = 0)
const;
299 virtual void getLline(std::list<vpMbtDistanceLine *> &linesList,
unsigned int level = 0)
const;
300 virtual void getLline(
const std::string &cameraName, std::list<vpMbtDistanceLine *> &linesList,
301 unsigned int level = 0)
const;
303 virtual std::vector<std::vector<double> >
getModelForDisplay(
unsigned int width,
unsigned int height,
306 bool displayFullModel =
false);
307 virtual void getModelForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfModels,
308 const std::map<std::string, unsigned int> &mapOfwidths,
309 const std::map<std::string, unsigned int> &mapOfheights,
310 const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
311 const std::map<std::string, vpCameraParameters> &mapOfCams,
312 bool displayFullModel =
false);
// Moving-edge settings of a single camera (presumably the reference camera —
// confirm against the implementation).
314 virtual vpMe getMovingEdge()
    const;
// Two-camera convenience overload: moving-edge settings of the first and
// second cameras returned through me1 and me2.
315 virtual void getMovingEdge(
    vpMe &me1,
    vpMe &me2)
    const;
// Moving-edge settings of every camera, keyed by camera name.
316 virtual void getMovingEdge(std::map<std::string, vpMe> &mapOfMovingEdges)
    const;
337 virtual unsigned int getNbPoints(
unsigned int level = 0)
const;
338 virtual void getNbPoints(std::map<std::string, unsigned int> &mapOfNbPoints,
unsigned int level = 0)
const;
341 virtual void getNbPolygon(std::map<std::string, unsigned int> &mapOfNbPolygons)
const;
346 virtual std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
347 getPolygonFaces(
bool orderPolygons =
true,
bool useVisibility =
true,
bool clipPolygon =
false);
348 virtual void getPolygonFaces(std::map<std::string, std::vector<vpPolygon> > &mapOfPolygons,
349 std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
350 bool orderPolygons =
true,
bool useVisibility =
true,
bool clipPolygon =
false);
// Current estimated pose of every camera with respect to the object,
// keyed by camera name.
355 virtual void getPose(std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
    const;
// Name of the reference camera; other cameras appear to be related to it via
// the camera transformation matrices (see setCameraTransformationMatrix) —
// TODO confirm.
357 virtual std::string getReferenceCameraName()
    const;
// Feature-type bitmask (combination of vpTrackerType flags) of the tracker.
361 virtual int getTrackerType()
    const;
365#ifdef VISP_HAVE_MODULE_GUI
368 const std::string &initFile1,
const std::string &initFile2,
bool displayHelp =
false,
372 const std::string &initFile2,
bool displayHelp =
false,
378 const std::map<std::string, std::string> &mapOfInitFiles,
bool displayHelp =
false,
379 const std::map<std::string, vpHomogeneousMatrix> &mapOfT = std::map<std::string, vpHomogeneousMatrix>());
382 const std::map<std::string, std::string> &mapOfInitFiles,
bool displayHelp =
false,
383 const std::map<std::string, vpHomogeneousMatrix> &mapOfT = std::map<std::string, vpHomogeneousMatrix>());
388 const std::string &initFile1,
const std::string &initFile2);
390 const std::string &initFile1,
const std::string &initFile2);
393 const std::map<std::string, std::string> &mapOfInitPoints);
395 const std::map<std::string, std::string> &mapOfInitPoints);
400 const std::string &initFile1,
const std::string &initFile2);
402 const std::string &initFile1,
const std::string &initFile2);
405 const std::map<std::string, std::string> &mapOfInitPoses);
407 const std::map<std::string, std::string> &mapOfInitPoses);
415 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses);
417 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses);
419 virtual void loadConfigFile(
const std::string &configFile,
bool verbose =
true);
420 virtual void loadConfigFile(
const std::string &configFile1,
const std::string &configFile2,
bool verbose =
true);
421 virtual void loadConfigFile(
const std::map<std::string, std::string> &mapOfConfigFiles,
bool verbose =
true);
423#ifdef VISP_HAVE_NLOHMANN_JSON
424 virtual void saveConfigFile(
const std::string &settingsFile)
const;
427 virtual void loadModel(
const std::string &modelFile,
bool verbose =
false,
429 virtual void loadModel(
const std::string &modelFile1,
const std::string &modelFile2,
bool verbose =
false,
434 loadModel(
const std::map<std::string, std::string> &mapOfModelFiles,
bool verbose =
false,
435 const std::map<std::string, vpHomogeneousMatrix> &mapOfT = std::map<std::string, vpHomogeneousMatrix>());
443 const std::string &cad_name1,
const std::string &cad_name2,
const vpHomogeneousMatrix &c1Mo,
448 const std::string &cad_name1,
const std::string &cad_name2,
const vpHomogeneousMatrix &c1Mo,
455 const std::map<std::string, std::string> &mapOfModelFiles,
456 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
bool verbose =
false,
457 const std::map<std::string, vpHomogeneousMatrix> &mapOfT = std::map<std::string, vpHomogeneousMatrix>());
459 reInitModel(
const std::map<std::string,
const vpImage<vpRGBa> *> &mapOfColorImages,
460 const std::map<std::string, std::string> &mapOfModelFiles,
461 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
bool verbose =
false,
462 const std::map<std::string, vpHomogeneousMatrix> &mapOfT = std::map<std::string, vpHomogeneousMatrix>());
468 virtual void setAngleAppear(
const std::map<std::string, double> &mapOfAngles);
472 virtual void setAngleDisappear(
const std::map<std::string, double> &mapOfAngles);
476 virtual void setCameraParameters(
const std::map<std::string, vpCameraParameters> &mapOfCameraParameters);
478 virtual void setCameraTransformationMatrix(
const std::string &cameraName,
481 setCameraTransformationMatrix(
const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix);
483 virtual void setClipping(
const unsigned int &flags);
484 virtual void setClipping(
const unsigned int &flags1,
const unsigned int &flags2);
485 virtual void setClipping(
const std::map<std::string, unsigned int> &mapOfClippingFlags);
487 virtual void setDepthDenseFilteringMaxDistance(
double maxDistance);
488 virtual void setDepthDenseFilteringMethod(
int method);
489 virtual void setDepthDenseFilteringMinDistance(
double minDistance);
490 virtual void setDepthDenseFilteringOccupancyRatio(
double occupancyRatio);
491 virtual void setDepthDenseSamplingStep(
unsigned int stepX,
unsigned int stepY);
495 virtual void setDepthNormalPclPlaneEstimationMethod(
int method);
496 virtual void setDepthNormalPclPlaneEstimationRansacMaxIter(
int maxIter);
497 virtual void setDepthNormalPclPlaneEstimationRansacThreshold(
double threshold);
498 virtual void setDepthNormalSamplingStep(
unsigned int stepX,
unsigned int stepY);
506 virtual void setFeatureFactors(
const std::map<vpTrackerType, double> &mapOfFeatureFactors);
508 virtual void setGoodMovingEdgesRatioThreshold(
double threshold);
515#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
516 virtual void setKltMaskBorder(
const unsigned int &e);
517 virtual void setKltMaskBorder(
const unsigned int &e1,
const unsigned int &e2);
518 virtual void setKltMaskBorder(
const std::map<std::string, unsigned int> &mapOfErosions);
522 virtual void setKltOpencv(
const std::map<std::string, vpKltOpencv> &mapOfKlts);
524 virtual void setKltThresholdAcceptation(
double th);
527 virtual void setLod(
bool useLod,
const std::string &name =
"");
534 virtual void setMovingEdge(
const vpMe &me);
535 virtual void setMovingEdge(
const vpMe &me1,
const vpMe &me2);
536 virtual void setMovingEdge(
const std::map<std::string, vpMe> &mapOfMe);
556 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses);
558 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses);
566 virtual void setReferenceCameraName(
const std::string &referenceCameraName);
570 virtual void setTrackerType(
int type);
571 virtual void setTrackerType(
const std::map<std::string, int> &mapOfTrackerTypes);
573 virtual void setUseDepthDenseTracking(
const std::string &name,
const bool &useDepthDenseTracking);
574 virtual void setUseDepthNormalTracking(
const std::string &name,
const bool &useDepthNormalTracking);
575 virtual void setUseEdgeTracking(
const std::string &name,
const bool &useEdgeTracking);
576#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
577 virtual void setUseKltTracking(
const std::string &name,
const bool &useKltTracking);
593 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds);
595 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds);
599 std::map<std::string,
const std::vector<vpColVector> *> &mapOfPointClouds,
600 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
601 std::map<std::string, unsigned int> &mapOfPointCloudHeights);
603 std::map<std::string,
const std::vector<vpColVector> *> &mapOfPointClouds,
604 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
605 std::map<std::string, unsigned int> &mapOfPointCloudHeights);
608 virtual void computeProjectionError();
616 std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist);
621 const std::string &name =
"");
624 const std::string &name =
"");
630 virtual void loadConfigFileXML(
const std::string &configFile,
bool verbose =
true);
631#ifdef VISP_HAVE_NLOHMANN_JSON
632 virtual void loadConfigFileJSON(
const std::string &configFile,
bool verbose =
true);
637 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds);
640 std::map<std::string,
const std::vector<vpColVector> *> &mapOfPointClouds,
641 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
642 std::map<std::string, unsigned int> &mapOfPointCloudHeights);
646#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
653#ifdef VISP_HAVE_NLOHMANN_JSON
654 friend void to_json(nlohmann::json &j,
const TrackerWrapper &t);
655 friend void from_json(
const nlohmann::json &j, TrackerWrapper &t);
// Build a single-camera tracker wrapper configured with the given
// feature-type bitmask (combination of vpTrackerType flags).
671 explicit TrackerWrapper(
    int trackerType);
// Virtual destructor: TrackerWrapper is used polymorphically.
673 virtual ~TrackerWrapper();
// Last computed error (residual) vector; returns the stored m_error as-is.
676 virtual inline vpColVector getError()
    const {
    return m_error; }
// Robust-estimation weights from the last optimisation; returns m_w as-is.
678 virtual inline vpColVector getRobustWeights()
    const {
    return m_w; }
// Feature-type bitmask this wrapper was configured with; returns m_trackerType.
680 virtual inline int getTrackerType()
    const {
    return m_trackerType; }
683 const vpColor &col,
unsigned int thickness = 1,
bool displayFullModel =
false);
685 const vpColor &col,
unsigned int thickness = 1,
bool displayFullModel =
false);
687 virtual std::vector<std::vector<double> > getFeaturesForDisplay();
689 virtual std::vector<std::vector<double> > getModelForDisplay(
unsigned int width,
unsigned int height,
692 bool displayFullModel =
false);
696 virtual void loadConfigFile(
const std::string &configFile,
bool verbose =
true);
701 virtual void reInitModel(
const vpImage<vpRGBa> &I_color,
const std::string &cad_name,
705 virtual void resetTracker();
709 virtual void setClipping(
const unsigned int &flags);
711 virtual void setFarClippingDistance(
const double &dist);
713 virtual void setNearClippingDistance(
const double &dist);
715 virtual void setOgreVisibilityTest(
const bool &v);
720 virtual void setProjectionErrorComputation(
const bool &flag);
722 virtual void setScanLineVisibilityTest(
const bool &v);
724 virtual void setTrackerType(
int type);
726 virtual void testTracking();
733#if !((__GNUC__ == 4) && (__GNUC_MINOR__ == 6))
739 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud);
// Virtual visual servoing (VVS) steps — initialisation, interaction-matrix and
// residual computation, and robust weight update; presumably overriding the
// base model-based tracker's hooks — confirm against the base class.
744 virtual void computeVVSInit();
746 virtual void computeVVSInteractionMatrixAndResidu();
750 virtual void computeVVSWeights();
752 virtual void initCircle(
const vpPoint &p1,
const vpPoint &p2,
const vpPoint &p3,
double radius,
int idFace = 0,
753 const std::string &name =
"");
755 virtual void initCylinder(
const vpPoint &p1,
const vpPoint &p2,
double radius,
int idFace = 0,
756 const std::string &name =
"");
758 virtual void initFaceFromCorners(
vpMbtPolygon &polygon);
765 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud);
767 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud);
769 virtual void postTracking(
const vpImage<unsigned char> *
const ptr_I = NULL,
const unsigned int pointcloud_width = 0,
770 const unsigned int pointcloud_height = 0);
772 const std::vector<vpColVector> *
const point_cloud = NULL,
773 const unsigned int pointcloud_width = 0,
const unsigned int pointcloud_height = 0);
782#if !((__GNUC__ == 4) && (__GNUC_MINOR__ == 6))
790#ifdef VISP_HAVE_NLOHMANN_JSON
791 friend void to_json(nlohmann::json &j,
const TrackerWrapper &t);
792 friend void from_json(
const nlohmann::json &j, TrackerWrapper &t);
857inline void to_json(nlohmann::json &j,
const vpMbGenericTracker::TrackerWrapper &t)
860 const static std::vector<vpMbGenericTracker::vpTrackerType> trackerTypes = {
862 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
870 {
"type", flagsToJSON(t.m_trackerType, trackerTypes)},
872 {
"angleDisappear",
vpMath::deg(t.getAngleDisappear())},
874 {
"useLod", t.useLodGeneral},
875 {
"minLineLengthThresholdGeneral", t.minLineLengthThresholdGeneral},
876 {
"minPolygonAreaThresholdGeneral", t.minPolygonAreaThresholdGeneral}
879 {
"features", t.displayFeatures},
880 {
"projectionError", t.m_projectionErrorDisplay}
884 {
"scanline", t.useScanLine}
887 {
"flags", clippingFlagsToJSON(t.getClipping())},
888 {
"near", t.getNearClippingDistance()},
889 {
"far", t.getFarClippingDistance()},
898#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
900 nlohmann::json klt = nlohmann::json {
901 {
"maxFeatures", t.tracker.getMaxFeatures()},
902 {
"windowSize", t.tracker.getWindowSize()},
903 {
"quality", t.tracker.getQuality()},
904 {
"minDistance", t.tracker.getMinDistance()},
905 {
"harris", t.tracker.getHarrisFreeParameter()},
906 {
"blockSize", t.tracker.getBlockSize()},
907 {
"pyramidLevels", t.tracker.getPyramidLevels()}
909 klt[
"maskBorder"] = t.maskBorder;
915 j[
"normals"] = nlohmann::json {
916 {
"featureEstimationMethod", t.m_depthNormalFeatureEstimationMethod},
918 {
"method", t.m_depthNormalPclPlaneEstimationMethod},
919 {
"ransacMaxIter", t.m_depthNormalPclPlaneEstimationRansacMaxIter},
920 {
"ransacThreshold", t.m_depthNormalPclPlaneEstimationRansacThreshold}
923 {
"x", t.m_depthNormalSamplingStepX},
924 {
"y", t.m_depthNormalSamplingStepY}
932 {
"x", t.m_depthDenseSamplingStepX},
933 {
"y", t.m_depthDenseSamplingStepY}
958inline void from_json(
const nlohmann::json &j, vpMbGenericTracker::TrackerWrapper &t)
960 t.setCameraParameters(j.at(
"camera"));
961 t.setTrackerType(flagsFromJSON<vpMbGenericTracker::vpTrackerType>(j.at(
"type")));
963 if (j.contains(
"angleAppear")) {
964 t.setAngleAppear(
vpMath::rad(
static_cast<double>(j.at(
"angleAppear"))));
966 if (j.contains(
"angleDisappear")) {
967 t.setAngleDisappear(
vpMath::rad(
static_cast<double>(j.at(
"angleDisappear"))));
969 if (j.contains(
"clipping")) {
970 const nlohmann::json clipping = j[
"clipping"];
971 t.setNearClippingDistance(clipping.value(
"near", t.getNearClippingDistance()));
972 t.setFarClippingDistance(clipping.value(
"far", t.getFarClippingDistance()));
973 if (clipping.contains(
"flags")) {
974 t.setClipping(flagsFromJSON<vpPolygon3D::vpPolygon3DClippingType>(clipping.at(
"flags")));
977 if (j.contains(
"lod")) {
978 const nlohmann::json lod = j[
"lod"];
979 t.useLodGeneral = lod.value(
"useLod", t.useLodGeneral);
980 t.minLineLengthThresholdGeneral = lod.value(
"minLineLengthThresholdGeneral", t.minLineLengthThresholdGeneral);
981 t.minPolygonAreaThresholdGeneral = lod.value(
"minPolygonAreaThresholdGeneral", t.minPolygonAreaThresholdGeneral);
982 t.applyLodSettingInConfig =
false;
983 if (t.getNbPolygon() > 0) {
984 t.applyLodSettingInConfig =
true;
985 t.setLod(t.useLodGeneral);
986 t.setMinLineLengthThresh(t.minLineLengthThresholdGeneral);
987 t.setMinPolygonAreaThresh(t.minPolygonAreaThresholdGeneral);
990 if (j.contains(
"display")) {
991 const nlohmann::json displayJson = j[
"display"];
992 t.setDisplayFeatures(displayJson.value(
"features", t.displayFeatures));
993 t.setProjectionErrorDisplay(displayJson.value(
"projectionError", t.m_projectionErrorDisplay));
995 if (j.contains(
"visibilityTest")) {
996 const nlohmann::json visJson = j[
"visibilityTest"];
997 t.setOgreVisibilityTest(visJson.value(
"ogre", t.useOgre));
998 t.setScanLineVisibilityTest(visJson.value(
"scanline", t.useScanLine));
1007#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
1009 const nlohmann::json klt = j.at(
"klt");
1010 auto &ktrack = t.tracker;
1011 ktrack.setMaxFeatures(klt.value(
"maxFeatures", 10000));
1012 ktrack.setWindowSize(klt.value(
"windowSize", 5));
1013 ktrack.setQuality(klt.value(
"quality", 0.01));
1014 ktrack.setMinDistance(klt.value(
"minDistance", 5));
1015 ktrack.setHarrisFreeParameter(klt.value(
"harris", 0.01));
1016 ktrack.setBlockSize(klt.value(
"blockSize", 3));
1017 ktrack.setPyramidLevels(klt.value(
"pyramidLevels", 3));
1018 t.setMaskBorder(klt.value(
"maskBorder", t.maskBorder));
1019 t.faces.getMbScanLineRenderer().setMaskBorder(t.maskBorder);
1022 if (j.contains(
"klt")) {
1023 std::cerr <<
"Trying to load a KLT tracker, but the ViSP dependency requirements are not met. Ignoring." << std::endl;
1028 const nlohmann::json n = j.at(
"normals");
1029 t.setDepthNormalFeatureEstimationMethod(n.at(
"featureEstimationMethod"));
1030 if (n.contains(
"pcl")) {
1031 const nlohmann::json pcl = n[
"pcl"];
1032 t.setDepthNormalPclPlaneEstimationMethod(pcl.at(
"method"));
1033 t.setDepthNormalPclPlaneEstimationRansacMaxIter(pcl.at(
"ransacMaxIter"));
1034 t.setDepthNormalPclPlaneEstimationRansacThreshold(pcl.at(
"ransacThreshold"));
1036 if (n.contains(
"sampling")) {
1037 const nlohmann::json sampling = n.at(
"sampling");
1038 t.setDepthNormalSamplingStep(sampling.at(
"x"), sampling.at(
"y"));
1043 const nlohmann::json dense = j.at(
"dense");
1044 if (dense.contains(
"sampling")) {
1045 const nlohmann::json sampling = dense.at(
"sampling");
1046 t.setDepthDenseSamplingStep(sampling.at(
"x"), sampling.at(
"y"));