From 527787bdf93f1232adee072a969238e5906a8e37 Mon Sep 17 00:00:00 2001 From: Fabien Spindler Date: Mon, 18 Dec 2023 17:50:27 +0100 Subject: [PATCH] Add options WITH_PUGIXML and WITH_STBIMAGE to be able to enable/disable these 3rdparties usage --- CMakeLists.txt | 8 +- .../visp-acquire-franka-calib-data.cpp | 8 +- ...sp-acquire-universal-robots-calib-data.cpp | 5 +- .../visp-compute-chessboard-poses.cpp | 9 +- cmake/VISP3rdParty.cmake | 24 +- cmake/VISPGenerateConfigScript.cmake | 2 + cmake/templates/VISPConfig.cmake.in | 2 + cmake/templates/vpConfig.h.in | 5 +- doc/config-doxygen.in | 2 + example/calibration/calibrate-camera.cpp | 140 +++++---- .../device/framegrabber/saveRealSenseData.cpp | 6 +- example/servo-bebop2/servoBebop2.cpp | 6 + example/servo-flir-ptu/servoFlirPtuIBVS.cpp | 250 ++++++++------- .../servo-pixhawk/servoPixhawkDroneIBVS.cpp | 12 +- example/tracking/mbtEdgeKltTracking.cpp | 13 +- example/tracking/mbtEdgeTracking.cpp | 11 +- example/tracking/mbtGenericTracking.cpp | 12 +- example/tracking/mbtGenericTracking2.cpp | 12 +- example/tracking/mbtGenericTrackingDepth.cpp | 7 +- .../tracking/mbtGenericTrackingDepthOnly.cpp | 42 +-- example/tracking/mbtKltTracking.cpp | 41 ++- modules/core/CMakeLists.txt | 30 +- .../include/visp3/core/vpXmlParserCamera.h | 2 + .../visp3/core/vpXmlParserHomogeneousMatrix.h | 2 + .../visp3/core/vpXmlParserRectOriented.h | 2 + modules/core/src/camera/vpXmlParserCamera.cpp | 7 + .../vpXmlParserHomogeneousMatrix.cpp | 58 ++-- .../geometry/vpXmlParserRectOriented.cpp | 17 +- .../core/test/camera/testXmlParserCamera.cpp | 3 + .../test/image-with-dataset/testImageWarp.cpp | 45 +-- .../math/testXmlParserHomogeneousMatrix.cpp | 2 + .../geometry/testXmlParserRectOriented.cpp | 2 + modules/io/CMakeLists.txt | 15 +- modules/io/include/visp3/io/vpImageIo.h | 10 +- .../io/src/image/private/vpImageIoBackend.h | 4 +- modules/io/src/image/private/vpImageIoStb.cpp | 6 + .../io/src/image/private/vpImageIoTinyEXR.cpp | 3 +- modules/io/src/image/vpImageIo.cpp | 288 ++++++++++++++++-- .../image-with-dataset/testImageLoadSave.cpp | 2 + .../robot/src/real-robot/afma6/vpAfma6.cpp | 123 ++++---- .../robot/src/real-robot/viper/vpViper650.cpp | 64 ++-- .../robot/src/real-robot/viper/vpViper850.cpp | 64 ++-- .../sensor/src/rgb-depth/kinect/vpKinect.cpp | 29 +- modules/tracker/mbt/CMakeLists.txt | 13 +- .../include/visp3/mbt/vpMbtXmlGenericParser.h | 5 +- .../mbt/src/depth/vpMbDepthDenseTracker.cpp | 28 +- .../mbt/src/depth/vpMbDepthNormalTracker.cpp | 34 ++- .../tracker/mbt/src/edge/vpMbEdgeTracker.cpp | 131 +++++--- .../mbt/src/hybrid/vpMbEdgeKltTracker.cpp | 8 +- .../tracker/mbt/src/klt/vpMbKltTracker.cpp | 66 ++-- .../tracker/mbt/src/vpMbGenericTracker.cpp | 6 + modules/tracker/mbt/src/vpMbTracker.cpp | 6 + .../tracker/mbt/src/vpMbtXmlGenericParser.cpp | 7 + .../perfGenericTracker.cpp | 106 ++++++- .../testGenericTracker.cpp | 4 +- .../testGenericTrackerDepth.cpp | 4 +- .../testGenericTrackerDeterminist.cpp | 40 +++ .../testMbtXmlGenericParser.cpp | 4 +- .../visp3/vision/vpXmlConfigParserKeyPoint.h | 2 + modules/vision/src/key-point/vpKeyPoint.cpp | 15 + .../key-point/vpXmlConfigParserKeyPoint.cpp | 7 + .../keypoint-with-dataset/testKeyPoint-2.cpp | 3 +- .../keypoint-with-dataset/testKeyPoint-4.cpp | 3 +- .../keypoint-with-dataset/testKeyPoint-7.cpp | 53 ++-- .../testXmlConfigParserKeyPoint.cpp | 3 +- .../tutorial-pose-from-planar-object.cpp | 6 +- .../tutorial-pose-from-points-live.cpp | 2 +- ...torial-detection-object-mbt-deprecated.cpp | 2 + 
.../object/tutorial-detection-object-mbt.cpp | 2 + ...orial-detection-object-mbt2-deprecated.cpp | 6 +- .../object/tutorial-detection-object-mbt2.cpp | 6 +- ...-apriltag-detector-live-T265-realsense.cpp | 60 ++-- ...-apriltag-detector-live-rgbd-realsense.cpp | 67 ++-- .../tag/tutorial-apriltag-detector-live.cpp | 8 +- .../tag/tutorial-apriltag-detector.cpp | 70 +++-- tutorial/image/tutorial-undistort.cpp | 32 +- .../robot/flir-ptu/tutorial-flir-ptu-ibvs.cpp | 85 +++--- .../raspberry/visp/mbot-apriltag-ibvs.cpp | 71 +++-- .../raspberry/visp/mbot-apriltag-pbvs.cpp | 63 ++-- ...torial-mb-generic-tracker-apriltag-rs2.cpp | 1 - ...ial-mb-generic-tracker-apriltag-webcam.cpp | 2 + ...torial-mb-generic-tracker-rgbd-blender.cpp | 3 +- .../model-based/generic-rgbd/CMakeLists.txt | 3 +- ...rial-mb-generic-tracker-rgbd-realsense.cpp | 158 ++++++---- ...mb-generic-tracker-rgbd-structure-core.cpp | 153 ++++++---- .../tutorial-mb-generic-tracker-rgbd.cpp | 121 ++++++-- ...utorial-mb-generic-tracker-stereo-mono.cpp | 4 +- .../tutorial-mb-generic-tracker-stereo.cpp | 33 +- .../tutorial-mb-generic-tracker-full.cpp | 2 + .../tutorial-mb-generic-tracker-live.cpp | 10 +- .../old/edges/tutorial-mb-edge-tracker.cpp | 4 +- .../old/generic/tutorial-mb-tracker-full.cpp | 2 + .../old/hybrid/tutorial-mb-hybrid-tracker.cpp | 2 + .../old/keypoint/tutorial-mb-klt-tracker.cpp | 9 +- 94 files changed, 1942 insertions(+), 988 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index f4f8734603..3eb48f7f73 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -648,7 +648,9 @@ VP_OPTION(WITH_QBDEVICE "" "" "Build qbdevice-api as built-in lib VP_OPTION(WITH_TAKKTILE2 "" "" "Build Right Hand takktile2 driver as built-in library" "" ON IF (VISP_CXX_STANDARD GREATER VISP_CXX_STANDARD_98) AND (NOT WIN32) AND (NOT WINRT) AND (NOT IOS) AND (NOT ANDROID)) VP_OPTION(WITH_CATCH2 "" "" "Use catch2" "" ON IF (VISP_CXX_STANDARD GREATER VISP_CXX_STANDARD_98)) VP_OPTION(WITH_POLOLU "" "" "Build rapa pololu as built-in library" "" ON IF (NOT WINRT) AND (NOT IOS) AND (NOT ANDROID)) +VP_OPTION(WITH_PUGIXML "" "" "Use pugixml built-in third-party" "" ON) VP_OPTION(WITH_SIMDLIB "" "" "Use simdlib built-in third-party" "" ON) +VP_OPTION(WITH_STBIMAGE "" "" "Use stb_image built-in third-party" "" ON) VP_OPTION(WITH_TINYEXR "" "" "Use tinyexr built-in third-party" "" ON) # ---------------------------------------------------------------------------- @@ -944,7 +946,9 @@ VP_SET(VISP_HAVE_TAKKTILE2 TRUE IF (BUILD_MODULE_visp_robot AND WITH_TAKKTILE2 VP_SET(VISP_HAVE_POLOLU TRUE IF (BUILD_MODULE_visp_robot AND WITH_POLOLU)) VP_SET(VISP_HAVE_CATCH2 TRUE IF (BUILD_MODULE_visp_core AND WITH_CATCH2)) VP_SET(VISP_HAVE_SIMDLIB TRUE IF (BUILD_MODULE_visp_core AND WITH_SIMDLIB)) +VP_SET(VISP_HAVE_STBIMAGE TRUE IF (BUILD_MODULE_visp_core AND WITH_STBIMAGE)) VP_SET(VISP_HAVE_TINYEXR TRUE IF (BUILD_MODULE_visp_core AND WITH_TINYEXR)) +VP_SET(VISP_HAVE_PUGIXML TRUE IF (BUILD_MODULE_visp_core AND WITH_PUGIXML)) VP_SET(VISP_HAVE_QUALISYS TRUE IF (BUILD_MODULE_visp_sensor AND USE_QUALISYS)) VP_SET(VISP_HAVE_VICON TRUE IF (BUILD_MODULE_visp_sensor AND USE_VICON)) @@ -1507,7 +1511,7 @@ status(" Use JPEG:" USE_JPEG THEN "yes (ver ${JPEG_LIB_ status(" Use PNG:" USE_PNG THEN "yes (ver ${PNG_VERSION_STRING})" ELSE "no") status(" \\- Use ZLIB:" USE_ZLIB THEN "yes (ver ${ZLIB_VERSION_STRING})" ELSE "no") status(" Use OpenCV:" USE_OPENCV THEN "yes (ver ${OpenCV_VERSION})" ELSE "no") -status(" Use stb_image (built-in):" "yes (ver ${STBIMAGE_VERSION})") +status(" Use 
stb_image (built-in):" WITH_STBIMAGE THEN "yes (ver ${STBIMAGE_VERSION})" ELSE "no") status(" Use TinyEXR (built-in):" WITH_TINYEXR THEN "yes (ver ${TINYEXR_VERSION})" ELSE "no") status(" Use simdlib (built-in):" WITH_SIMDLIB THEN "yes" ELSE "no") status("") @@ -1576,7 +1580,7 @@ status(" \\- Use AprilTag big family:" WITH_APRILTAG_BIG_FAMILY THEN "yes" E status("") status(" Misc: ") status(" Use Clipper (built-in):" WITH_CLIPPER THEN "yes (ver ${CLIPPER_VERSION})" ELSE "no") -status(" Use pugixml (built-in):" "yes (ver ${PUGIXML_VERSION})") +status(" Use pugixml (built-in):" WITH_PUGIXML THEN "yes (ver ${PUGIXML_VERSION})" ELSE "no") status(" Use libxml2:" USE_XML2 THEN "yes (ver ${XML2_VERSION_STRING})" ELSE "no") status(" Use json (nlohmann):" USE_NLOHMANN_JSON THEN "yes (ver ${nlohmann_json_VERSION})" ELSE "no") status("") diff --git a/apps/calibration/visp-acquire-franka-calib-data.cpp b/apps/calibration/visp-acquire-franka-calib-data.cpp index 3498622c4b..2de13c8a44 100644 --- a/apps/calibration/visp-acquire-franka-calib-data.cpp +++ b/apps/calibration/visp-acquire-franka-calib-data.cpp @@ -43,7 +43,7 @@ #include #if defined(VISP_HAVE_REALSENSE2) && \ - (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_FRANKA) && \ + (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_FRANKA) && defined(VISP_HAVE_PUGIXML) && \ defined(VISP_HAVE_MODULE_GUI) && defined(VISP_HAVE_MODULE_ROBOT) && defined(VISP_HAVE_MODULE_SENSOR) // optional void usage(const char **argv, int error, const std::string &robot_ip) @@ -103,8 +103,9 @@ int main(int argc, const char **argv) std::cout << "Image size: " << width << " x " << height << std::endl; // Save intrinsics vpCameraParameters cam; - vpXmlParserCamera xml_camera; + cam = g.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithDistortion); + vpXmlParserCamera xml_camera; xml_camera.save(cam, "franka_camera.xml", "Camera", width, height); #if defined(VISP_HAVE_X11) @@ -172,6 +173,9 @@ int main() #if !defined(VISP_HAVE_FRANKA) std::cout << "Install libfranka." << std::endl; #endif +#if !defined(VISP_HAVE_PUGIXML) + std::cout << "Enable pugyxml built-in usage." << std::endl; +#endif std::cout << "After installation of the missing 3rd parties, configure ViSP with cmake " << "and build ViSP again." << std::endl; diff --git a/apps/calibration/visp-acquire-universal-robots-calib-data.cpp b/apps/calibration/visp-acquire-universal-robots-calib-data.cpp index 7ecadd446d..aea3d41630 100644 --- a/apps/calibration/visp-acquire-universal-robots-calib-data.cpp +++ b/apps/calibration/visp-acquire-universal-robots-calib-data.cpp @@ -43,7 +43,7 @@ #include #if defined(VISP_HAVE_REALSENSE2) && \ - (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_UR_RTDE) && \ + (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_UR_RTDE) && defined(VISP_HAVE_PUGIXML) && \ defined(VISP_HAVE_MODULE_GUI) && defined(VISP_HAVE_MODULE_ROBOT) && defined(VISP_HAVE_MODULE_SENSOR) // optional void usage(const char **argv, int error, const std::string &robot_ip) @@ -175,6 +175,9 @@ int main() std::cout << "ViSP is not build with libur_rtde 3rd party used to control a robot from Universal Robots..." << std::endl; #endif +#if !defined(VISP_HAVE_PUGIXML) + std::cout << "Enable pugyxml built-in usage." << std::endl; +#endif std::cout << "After installation of the missing 3rd parties, configure ViSP with cmake" << " and build ViSP again." 
<< std::endl; diff --git a/apps/calibration/visp-compute-chessboard-poses.cpp b/apps/calibration/visp-compute-chessboard-poses.cpp index 9c095bf1b3..b0b56b2af3 100644 --- a/apps/calibration/visp-compute-chessboard-poses.cpp +++ b/apps/calibration/visp-compute-chessboard-poses.cpp @@ -36,7 +36,7 @@ #include -#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_CALIB3D) +#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_CALIB3D) && defined(VISP_HAVE_PUGIXML) #include #include @@ -345,7 +345,12 @@ int main(int argc, const char **argv) #else int main() { - std::cerr << "OpenCV 2.3.0 or higher is requested to run the calibration." << std::endl; +#if !(defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_CALIB3D)) + std::cerr << "OpenCV calib3d module is requested to run the calibration." << std::endl; +#endif +#if !defined(VISP_HAVE_PUGIXML) + std::cout << "pugixml built-in 3rdparty is requested to run the calibration." << std::endl; +#endif return EXIT_SUCCESS; } #endif diff --git a/cmake/VISP3rdParty.cmake b/cmake/VISP3rdParty.cmake index 13014f6761..d821000499 100644 --- a/cmake/VISP3rdParty.cmake +++ b/cmake/VISP3rdParty.cmake @@ -57,12 +57,13 @@ if(WITH_TAKKTILE2) set(TAKKTILE2_VERSION ${TAKKTILE2_MAJOR_VERSION}.${TAKKTILE2_MINOR_VERSION}.${TAKKTILE2_PATCH_VERSION}) endif() -# pugixml is always enabled to provide default XML I/O capabilities -set(PUGIXML_LIBRARY visp_pugixml) -add_subdirectory("${VISP_SOURCE_DIR}/3rdparty/pugixml-1.9") -set(PUGIXML_INCLUDE_DIRS "${${PUGIXML_LIBRARY}_SOURCE_DIR}" "${${PUGIXML_LIBRARY}_BINARY_DIR}") -set(PUGIXML_LIBRARIES ${PUGIXML_LIBRARY}) -set(PUGIXML_VERSION ${PUGIXML_MAJOR_VERSION}.${PUGIXML_MINOR_VERSION}.${PUGIXML_PATCH_VERSION}) +if(WITH_PUGIXML) + set(PUGIXML_LIBRARY visp_pugixml) + add_subdirectory("${VISP_SOURCE_DIR}/3rdparty/pugixml-1.9") + set(PUGIXML_INCLUDE_DIRS "${${PUGIXML_LIBRARY}_SOURCE_DIR}" "${${PUGIXML_LIBRARY}_BINARY_DIR}") + set(PUGIXML_LIBRARIES ${PUGIXML_LIBRARY}) + set(PUGIXML_VERSION ${PUGIXML_MAJOR_VERSION}.${PUGIXML_MINOR_VERSION}.${PUGIXML_PATCH_VERSION}) +endif() if(WITH_SIMDLIB) set(SIMD_LIBRARY visp_simdlib) @@ -71,11 +72,12 @@ if(WITH_SIMDLIB) set(SIMDLIB_LIBRARIES ${SIMD_LIBRARY}) endif() -# stb is always enabled -set(STBIMAGE_LIBRARY visp_stbimage) -add_subdirectory("${VISP_SOURCE_DIR}/3rdparty/stb_image") -set(STBIMAGE_INCLUDE_DIRS "${VISP_SOURCE_DIR}/3rdparty/stb_image") -set(STBIMAGE_VERSION ${STBIMAGE_MAJOR_VERSION}.${STBIMAGE_MINOR_VERSION}.${STBIMAGE_PATCH_VERSION}) +if(WITH_STBIMAGE) + set(STBIMAGE_LIBRARY visp_stbimage) + add_subdirectory("${VISP_SOURCE_DIR}/3rdparty/stb_image") + set(STBIMAGE_INCLUDE_DIRS "${VISP_SOURCE_DIR}/3rdparty/stb_image") + set(STBIMAGE_VERSION ${STBIMAGE_MAJOR_VERSION}.${STBIMAGE_MINOR_VERSION}.${STBIMAGE_PATCH_VERSION}) +endif() if(WITH_TINYEXR) set(TINYEXR_LIBRARY visp_tinyexr) diff --git a/cmake/VISPGenerateConfigScript.cmake b/cmake/VISPGenerateConfigScript.cmake index 6bcbfeeca6..17d4809160 100644 --- a/cmake/VISPGenerateConfigScript.cmake +++ b/cmake/VISPGenerateConfigScript.cmake @@ -181,7 +181,9 @@ if(NOT DEFINED CMAKE_HELPER_SCRIPT) VISP_HAVE_OPENMP WITH_CATCH2 + WITH_PUGIXML WITH_SIMDLIB + WITH_STBIMAGE WITH_TINYEXR FILE_VISP_SCRIPT_CONFIG diff --git a/cmake/templates/VISPConfig.cmake.in b/cmake/templates/VISPConfig.cmake.in index 9ba5fdadfc..58607a222e 100644 --- a/cmake/templates/VISPConfig.cmake.in +++ b/cmake/templates/VISPConfig.cmake.in @@ -246,6 +246,7 @@ set(VISP_HAVE_PNG "@VISP_HAVE_PNG@") set(VISP_HAVE_POLOLU "@VISP_HAVE_POLOLU@") set(VISP_HAVE_PTHREAD 
"@VISP_HAVE_PTHREAD@") set(VISP_HAVE_PTU46 "@VISP_HAVE_PTU46@") +set(VISP_HAVE_PUGIXML "@VISP_HAVE_PUGIXML@") set(VISP_HAVE_PYLON "@VISP_HAVE_PYLON@") set(VISP_HAVE_QBDEVICE "@VISP_HAVE_QBDEVICE@") set(VISP_HAVE_QT "@VISP_HAVE_QT@") @@ -256,6 +257,7 @@ set(VISP_HAVE_SIMDLIB "@VISP_HAVE_SIMDLIB@") set(VISP_HAVE_SOQT "@VISP_HAVE_SOQT@") set(VISP_HAVE_SOWIN "@VISP_HAVE_SOWIN@") set(VISP_HAVE_SOXT "@VISP_HAVE_SOXT@") +set(VISP_HAVE_STBIMAGE "@VISP_HAVE_STBIMAGE@") set(VISP_HAVE_TAKKTILE2 "@VISP_HAVE_TAKKTILE2@") set(VISP_HAVE_TENSORRT "@VISP_HAVE_TENSORRT@") set(VISP_HAVE_THREADS "@VISP_HAVE_THREADS@") diff --git a/cmake/templates/vpConfig.h.in b/cmake/templates/vpConfig.h.in index db098b7bee..78f4df6082 100644 --- a/cmake/templates/vpConfig.h.in +++ b/cmake/templates/vpConfig.h.in @@ -141,7 +141,7 @@ #cmakedefine VISP_HAVE_X11 // Always define pugixml for compatibility. -#define VISP_HAVE_PUGIXML +#cmakedefine VISP_HAVE_PUGIXML // Defined if XML2 library available. #cmakedefine VISP_HAVE_XML2 @@ -195,6 +195,9 @@ // Defined if simdlib library available #cmakedefine VISP_HAVE_SIMDLIB +// Defined if stb_image library available +#cmakedefine VISP_HAVE_STBIMAGE + // Defined if tinyexr library available #cmakedefine VISP_HAVE_TINYEXR diff --git a/doc/config-doxygen.in b/doc/config-doxygen.in index bb9eaf48ac..6ce50a8d93 100644 --- a/doc/config-doxygen.in +++ b/doc/config-doxygen.in @@ -2206,6 +2206,7 @@ PREDEFINED = @DOXYGEN_SHOULD_SKIP_THIS@ \ VISP_HAVE_POLOLU \ VISP_HAVE_PTHREAD \ VISP_HAVE_PTU46 \ + VISP_HAVE_PUGIXML \ VISP_HAVE_PYLON \ VISP_HAVE_QBDEVICE \ VISP_HAVE_QT \ @@ -2217,6 +2218,7 @@ PREDEFINED = @DOXYGEN_SHOULD_SKIP_THIS@ \ VISP_HAVE_SOWIN \ VISP_HAVE_SOQT \ VISP_HAVE_SOXT \ + VISP_HAVE_STBIMAGE \ VISP_HAVE_TAKKTILE2 \ VISP_HAVE_TENSORRT \ VISP_HAVE_THREADS \ diff --git a/example/calibration/calibrate-camera.cpp b/example/calibration/calibrate-camera.cpp index 84f59da1f7..560f65a4f1 100644 --- a/example/calibration/calibrate-camera.cpp +++ b/example/calibration/calibrate-camera.cpp @@ -36,7 +36,8 @@ #include -#if (VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_CALIB3D) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC) +#if (VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_CALIB3D) && defined(HAVE_OPENCV_HIGHGUI) && \ + defined(HAVE_OPENCV_IMGPROC) && defined(VISP_HAVE_PUGIXML) #include @@ -67,34 +68,34 @@ using namespace calib_helper; void usage(const char *argv[], int error) { std::cout << "Synopsis" << std::endl - << " " << argv[0] << " .cfg [--init-from-xml ]" - << " [--camera-name ] [--aspect-ratio ] [--output ] [--help] [-h]" << std::endl - << std::endl; + << " " << argv[0] << " .cfg [--init-from-xml ]" + << " [--camera-name ] [--aspect-ratio ] [--output ] [--help] [-h]" << std::endl + << std::endl; std::cout << "Description" << std::endl - << " .cfg Configuration file. See example in" << std::endl - << " \"default-chessboard.cfg\" or in \"default-circles.cfg\"." << std::endl - << " Default: \"default.cfg\"." << std::endl - << std::endl - << " --init-from-xml XML file that contains camera parameters" << std::endl - << " used to initialize the calibration process." << std::endl - << std::endl - << " --camera-name Camera name in the XML file set using \"--init-from-xml\" option." << std::endl - << " Default: \"Camera\"." << std::endl - << std::endl - << " --aspect-ratio Pixel aspect ratio. " << std::endl - << " To estimate px = py, use \"--aspect-ratio 1\" option. 
Set to -1" << std::endl - << " to unset any constraint for px and py parameters. " << std::endl - << " Default: -1." << std::endl - << std::endl - << " --output XML file containing estimated camera parameters." << std::endl - << " Default: \"camera.xml\"." << std::endl - << std::endl - << " --help, -h Print this helper message." << std::endl - << std::endl; + << " .cfg Configuration file. See example in" << std::endl + << " \"default-chessboard.cfg\" or in \"default-circles.cfg\"." << std::endl + << " Default: \"default.cfg\"." << std::endl + << std::endl + << " --init-from-xml XML file that contains camera parameters" << std::endl + << " used to initialize the calibration process." << std::endl + << std::endl + << " --camera-name Camera name in the XML file set using \"--init-from-xml\" option." << std::endl + << " Default: \"Camera\"." << std::endl + << std::endl + << " --aspect-ratio Pixel aspect ratio. " << std::endl + << " To estimate px = py, use \"--aspect-ratio 1\" option. Set to -1" << std::endl + << " to unset any constraint for px and py parameters. " << std::endl + << " Default: -1." << std::endl + << std::endl + << " --output XML file containing estimated camera parameters." << std::endl + << " Default: \"camera.xml\"." << std::endl + << std::endl + << " --help, -h Print this helper message." << std::endl + << std::endl; if (error) { std::cout << "Error" << std::endl - << " " - << "Unsupported parameter " << argv[error] << std::endl; + << " " + << "Unsupported parameter " << argv[error] << std::endl; } } @@ -116,19 +117,24 @@ int main(int argc, const char *argv[]) if (std::string(argv[i]) == "--init-from-xml" && i + 1 < argc) { opt_init_camera_xml_file = std::string(argv[i + 1]); i++; - } else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) { opt_camera_name = std::string(argv[i + 1]); i++; - } else if (std::string(argv[i]) == "--output" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--output" && i + 1 < argc) { opt_output_file_name = std::string(argv[i + 1]); i++; - } else if (std::string(argv[i]) == "--aspect-ratio" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--aspect-ratio" && i + 1 < argc) { opt_aspect_ratio = std::atof(argv[i + 1]); i++; - } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { + } + else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { usage(argv, 0); return EXIT_SUCCESS; - } else { + } + else { usage(argv, i); return EXIT_FAILURE; } @@ -157,7 +163,7 @@ int main(int argc, const char *argv[]) if (vpIoTools::checkFilename(opt_output_file_name)) { std::cout << "\nOutput file name " << opt_output_file_name << " already exists." << std::endl; std::cout << "Remove this file or change output file name using [--output ] command line option." - << std::endl; + << std::endl; return EXIT_SUCCESS; } @@ -167,10 +173,11 @@ int main(int argc, const char *argv[]) reader.setFileName(s.input); try { reader.open(I); - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cout << "Catch an exception: " << e.getStringMessage() << std::endl; std::cout << "Check if input images name \"" << s.input << "\" set in " << opt_inputSettingsFile - << " config file is correct..." << std::endl; + << " config file is correct..." 
<< std::endl; return EXIT_FAILURE; } @@ -200,11 +207,12 @@ int main(int argc, const char *argv[]) if (parser.parse(cam_init, opt_init_camera_xml_file, opt_camera_name, vpCameraParameters::perspectiveProjWithoutDistortion) != vpXmlParserCamera::SEQUENCE_OK) { std::cout << "Unable to find camera with name \"" << opt_camera_name - << "\" in file: " << opt_init_camera_xml_file << std::endl; + << "\" in file: " << opt_init_camera_xml_file << std::endl; std::cout << "Modify [--camera-name ] option value" << std::endl; return EXIT_FAILURE; } - } else { + } + else { std::cout << "Initialize camera parameters with default values " << std::endl; // Initialize camera parameters double px = cam_init.get_px(); @@ -292,7 +300,7 @@ int main(int argc, const char *argv[]) } if (!calib_status) { std::cout << "frame: " << frame_name << ", unable to calibrate from single image, image rejected" - << std::endl; + << std::endl; found = false; } } @@ -309,7 +317,8 @@ int main(int argc, const char *argv[]) "A click to process the next image", vpColor::green); vpDisplay::flush(I); vpDisplay::getClick(I); - } else { + } + else { vpDisplay::flush(I); vpTime::wait(s.tempo * 1000); } @@ -448,15 +457,16 @@ int main(int argc, const char *argv[]) if (xml.save(cam, opt_output_file_name.c_str(), opt_camera_name, I.getWidth(), I.getHeight()) == vpXmlParserCamera::SEQUENCE_OK) std::cout << "Camera parameters without distortion successfully saved in \"" << opt_output_file_name << "\"" - << std::endl; + << std::endl; else { std::cout << "Failed to save the camera parameters without distortion in \"" << opt_output_file_name << "\"" - << std::endl; + << std::endl; std::cout << "A file with the same name exists. Remove it to be able " - "to save the parameters..." - << std::endl; + "to save the parameters..." + << std::endl; } - } else { + } + else { std::cout << "Calibration without distortion failed." << std::endl; return EXIT_FAILURE; } @@ -520,8 +530,8 @@ int main(int argc, const char *argv[]) for (size_t idx = 0; idx < calib_info.size(); idx++) { std::cout << "\nThis tool computes the line fitting error (mean distance error) on image points extracted from " - "the raw distorted image." - << std::endl; + "the raw distorted image." + << std::endl; I = calib_info[idx].m_img; vpImageTools::undistort(I, cam, I_undist); @@ -546,8 +556,8 @@ int main(int argc, const char *argv[]) double line_fitting_error = vpMath::lineFitting(current_line, a, b, c); double line_fitting_error_undist = vpMath::lineFitting(current_line_undist, a, b, c); std::cout << calib_info[idx].m_frame_name << " line " << i + 1 - << " fitting error on distorted points: " << line_fitting_error - << " ; on undistorted points: " << line_fitting_error_undist << std::endl; + << " fitting error on distorted points: " << line_fitting_error + << " ; on undistorted points: " << line_fitting_error_undist << std::endl; vpImagePoint ip1 = current_line.front(); vpImagePoint ip2 = current_line.back(); @@ -555,8 +565,8 @@ int main(int argc, const char *argv[]) } std::cout << "\nThis tool computes the line fitting error (mean distance error) on image points extracted from " - "the undistorted image" - << " (vpImageTools::undistort())." << std::endl; + "the undistorted image" + << " (vpImageTools::undistort())." 
<< std::endl; cv::Mat cvI; std::vector pointBuf; vpImageConvert::convert(I_undist, cvI); @@ -579,13 +589,14 @@ int main(int argc, const char *argv[]) double a = 0, b = 0, c = 0; double line_fitting_error = vpMath::lineFitting(current_line, a, b, c); std::cout << calib_info[idx].m_frame_name << " undistorted image, line " << i + 1 - << " fitting error: " << line_fitting_error << std::endl; + << " fitting error: " << line_fitting_error << std::endl; vpImagePoint ip1 = current_line.front() + vpImagePoint(0, I.getWidth()); vpImagePoint ip2 = current_line.back() + vpImagePoint(0, I.getWidth()); vpDisplay::displayLine(I_dist_undist, ip1, ip2, vpColor::red); } - } else { + } + else { std::string msg("Unable to detect grid on undistorted image"); std::cout << msg << std::endl; std::cout << "Check that the grid is not too close to the image edges" << std::endl; @@ -627,29 +638,31 @@ int main(int argc, const char *argv[]) if (xml.save(cam, opt_output_file_name.c_str(), opt_camera_name, I.getWidth(), I.getHeight(), ss_additional_info.str()) == vpXmlParserCamera::SEQUENCE_OK) std::cout << "Camera parameters without distortion successfully saved in \"" << opt_output_file_name << "\"" - << std::endl; + << std::endl; else { std::cout << "Failed to save the camera parameters without distortion in \"" << opt_output_file_name << "\"" - << std::endl; + << std::endl; std::cout << "A file with the same name exists. Remove it to be able " - "to save the parameters..." - << std::endl; + "to save the parameters..." + << std::endl; } std::cout << std::endl; std::cout << "Estimated pose using vpPoseVector format: [tx ty tz tux tuy tuz] with translation in meter and " - "rotation in rad" - << std::endl; + "rotation in rad" + << std::endl; for (unsigned int i = 0; i < calibrator.size(); i++) std::cout << "Estimated pose on input data extracted from " << calib_info[i].m_frame_name << ": " - << vpPoseVector(calibrator[i].cMo_dist).t() << std::endl; - } else { + << vpPoseVector(calibrator[i].cMo_dist).t() << std::endl; + } + else { std::cout << "Calibration with distortion failed." << std::endl; return EXIT_FAILURE; } std::cout << "\nCamera calibration succeeded. Results are savec in " << "\"" << opt_output_file_name << "\"" << std::endl; return EXIT_SUCCESS; - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cout << "Catch an exception: " << e << std::endl; return EXIT_FAILURE; } @@ -657,9 +670,14 @@ int main(int argc, const char *argv[]) #else int main() { - std::cout << "OpenCV 2.3.0 or higher is requested to run the calibration." << std::endl; +#if !((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_CALIB3D) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC)) + std::cout << "OpenCV calib3d, highgui and imgproc modules are requested to run the calibration." << std::endl; std::cout << "Tip:" << std::endl; std::cout << "- Install OpenCV, configure again ViSP using cmake and build again this example" << std::endl; +#endif +#if !defined(VISP_HAVE_PUGIXML) + std::cout << "pugixml built-in 3rdparty is requested to run the calibration." 
<< std::endl; +#endif return EXIT_SUCCESS; } #endif diff --git a/example/device/framegrabber/saveRealSenseData.cpp b/example/device/framegrabber/saveRealSenseData.cpp index 989510085b..06e56f8e45 100644 --- a/example/device/framegrabber/saveRealSenseData.cpp +++ b/example/device/framegrabber/saveRealSenseData.cpp @@ -40,7 +40,7 @@ #include #if (defined(VISP_HAVE_REALSENSE) || defined(VISP_HAVE_REALSENSE2)) && \ - (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) + (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_PUGIXML) #include #include @@ -728,6 +728,10 @@ int main(int argc, char *argv[]) int main() { std::cerr << "Need libRealSense or libRealSense2 and C++11 and displayX or displayGDI!" << std::endl; + +#if !defined(VISP_HAVE_PUGIXML) + std::cout << "pugixml built-in 3rdparty is requested." << std::endl; +#endif return EXIT_SUCCESS; } #endif diff --git a/example/servo-bebop2/servoBebop2.cpp b/example/servo-bebop2/servoBebop2.cpp index 98bd516911..90fe22ab2f 100644 --- a/example/servo-bebop2/servoBebop2.cpp +++ b/example/servo-bebop2/servoBebop2.cpp @@ -73,6 +73,12 @@ int main() std::cout << "\nThis example requires ffmpeg library. You should install it.\n" << std::endl; return EXIT_SUCCESS; } +#elif !defined(VISP_HAVE_PUGIXML) +int main() +{ + std::cout << "\nThis example requires pugixml built-in 3rdparty library. You should enable it.\n" << std::endl; + return EXIT_SUCCESS; +} #else diff --git a/example/servo-flir-ptu/servoFlirPtuIBVS.cpp b/example/servo-flir-ptu/servoFlirPtuIBVS.cpp index 54f83f6f4b..329d749377 100644 --- a/example/servo-flir-ptu/servoFlirPtuIBVS.cpp +++ b/example/servo-flir-ptu/servoFlirPtuIBVS.cpp @@ -71,8 +71,8 @@ #include #include -#if defined(VISP_HAVE_FLIR_PTU_SDK) && defined(VISP_HAVE_FLYCAPTURE) && \ - (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) +#if defined(VISP_HAVE_FLIR_PTU_SDK) && defined(VISP_HAVE_FLYCAPTURE) && \ + (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_PUGIXML) void display_point_trajectory(const vpImage &I, const vpImagePoint &ip, std::vector &traj_ip) @@ -82,7 +82,8 @@ void display_point_trajectory(const vpImage &I, const vpImagePoin if (vpImagePoint::distance(ip, traj_ip.back()) > 2.) 
{ traj_ip.push_back(ip); } - } else { + } + else { traj_ip.push_back(ip); } for (size_t j = 1; j < traj_ip.size(); j++) { @@ -117,113 +118,127 @@ int main(int argc, char **argv) for (int i = 1; i < argc; i++) { if ((std::string(argv[i]) == "--portname" || std::string(argv[i]) == "-p") && (i + 1 < argc)) { opt_portname = std::string(argv[i + 1]); - } else if ((std::string(argv[i]) == "--baudrate" || std::string(argv[i]) == "-b") && (i + 1 < argc)) { + } + else if ((std::string(argv[i]) == "--baudrate" || std::string(argv[i]) == "-b") && (i + 1 < argc)) { opt_baudrate = std::atoi(argv[i + 1]); - } else if ((std::string(argv[i]) == "--network" || std::string(argv[i]) == "-n")) { + } + else if ((std::string(argv[i]) == "--network" || std::string(argv[i]) == "-n")) { opt_network = true; - } else if (std::string(argv[i]) == "--extrinsic" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--extrinsic" && i + 1 < argc) { opt_extrinsic = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) { opt_intrinsic = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) { opt_camera_name = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") { + } + else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") { opt_verbose = true; - } else if (std::string(argv[i]) == "--plot" || std::string(argv[i]) == "-p") { + } + else if (std::string(argv[i]) == "--plot" || std::string(argv[i]) == "-p") { opt_plot = true; - } else if (std::string(argv[i]) == "--display-image-trajectory" || std::string(argv[i]) == "-traj") { + } + else if (std::string(argv[i]) == "--display-image-trajectory" || std::string(argv[i]) == "-traj") { opt_display_trajectory = true; - } else if (std::string(argv[i]) == "--adaptive-gain" || std::string(argv[i]) == "-a") { + } + else if (std::string(argv[i]) == "--adaptive-gain" || std::string(argv[i]) == "-a") { opt_adaptive_gain = true; - } else if (std::string(argv[i]) == "--constant-gain" || std::string(argv[i]) == "-g") { + } + else if (std::string(argv[i]) == "--constant-gain" || std::string(argv[i]) == "-g") { opt_constant_gain = std::stod(argv[i + 1]); - } else if (std::string(argv[i]) == "--task-sequencing") { + } + else if (std::string(argv[i]) == "--task-sequencing") { opt_task_sequencing = true; - } else if (std::string(argv[i]) == "--quad-decimate" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--quad-decimate" && i + 1 < argc) { opt_quad_decimate = std::stoi(argv[i + 1]); } if (std::string(argv[i]) == "--tag-size" && i + 1 < argc) { opt_tag_size = std::stod(argv[i + 1]); - } else if (std::string(argv[i]) == "--no-convergence-threshold") { + } + else if (std::string(argv[i]) == "--no-convergence-threshold") { convergence_threshold = 0.; - } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { + } + else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { std::cout << "SYNOPSIS" << std::endl - << " " << argv[0] << " [--portname ] [--baudrate ] [--network] " - << "[--extrinsic ] [--intrinsic ] [--camera-name ] " - << "[--quad-decimate ] [--tag-size ] " - << "[--adaptive-gain] [--constant-gain] [--display-image-trajectory] [--plot] [--task-sequencing] " - << "[--no-convergence-threshold] [--verbose] [--help] [-h]" << std::endl - 
<< std::endl; + << " " << argv[0] << " [--portname ] [--baudrate ] [--network] " + << "[--extrinsic ] [--intrinsic ] [--camera-name ] " + << "[--quad-decimate ] [--tag-size ] " + << "[--adaptive-gain] [--constant-gain] [--display-image-trajectory] [--plot] [--task-sequencing] " + << "[--no-convergence-threshold] [--verbose] [--help] [-h]" << std::endl + << std::endl; std::cout << "DESCRIPTION" << std::endl - << " --portname, -p " << std::endl - << " Set serial or tcp port name." << std::endl - << std::endl - << " --baudrate, -b " << std::endl - << " Set serial communication baud rate. Default: " << opt_baudrate << "." << std::endl - << std::endl - << " --network, -n" << std::endl - << " Get PTU network information (Hostname, IP, Gateway) and exit. " << std::endl - << std::endl - << " --reset, -r" << std::endl - << " Reset PTU axis and exit. " << std::endl - << std::endl - << " --extrinsic " << std::endl - << " YAML file containing extrinsic camera parameters as a vpHomogeneousMatrix." << std::endl - << " It corresponds to the homogeneous transformation eMc, between end-effector" << std::endl - << " and camera frame." << std::endl - << std::endl - << " --intrinsic " << std::endl - << " Intrinsic camera parameters obtained after camera calibration." << std::endl - << std::endl - << " --camera-name " << std::endl - << " Name of the camera to consider in the xml file provided for intrinsic camera parameters." - << std::endl - << std::endl - << " --quad-decimate " << std::endl - << " Decimation factor used to detect AprilTag. Default " << opt_quad_decimate << "." << std::endl - << std::endl - << " --tag-size " << std::endl - << " Width in meter or the black part of the AprilTag used as target. Default " << opt_tag_size - << "." << std::endl - << std::endl - << " --adaptive-gain, -a" << std::endl - << " Enable adaptive gain instead of constant gain to speed up convergence. " << std::endl - << std::endl - << " --constant-gain, -g" << std::endl - << " Constant gain value. Default value: " << opt_constant_gain << std::endl - << std::endl - << " --display-image-trajectory, -traj" << std::endl - << " Display the trajectory of the target cog in the image. " << std::endl - << std::endl - << " --plot, -p" << std::endl - << " Enable curve plotter. " << std::endl - << std::endl - << " --task-sequencing" << std::endl - << " Enable task sequencing that allows to smoothly control the velocity of the robot. " << std::endl - << std::endl - << " --no-convergence-threshold" << std::endl - << " Disable ending servoing when it reaches the desired position." << std::endl - << std::endl - << " --verbose, -v" << std::endl - << " Additional printings. " << std::endl - << std::endl - << " --help, -h" << std::endl - << " Print this helper message. " << std::endl - << std::endl; + << " --portname, -p " << std::endl + << " Set serial or tcp port name." << std::endl + << std::endl + << " --baudrate, -b " << std::endl + << " Set serial communication baud rate. Default: " << opt_baudrate << "." << std::endl + << std::endl + << " --network, -n" << std::endl + << " Get PTU network information (Hostname, IP, Gateway) and exit. " << std::endl + << std::endl + << " --reset, -r" << std::endl + << " Reset PTU axis and exit. " << std::endl + << std::endl + << " --extrinsic " << std::endl + << " YAML file containing extrinsic camera parameters as a vpHomogeneousMatrix." << std::endl + << " It corresponds to the homogeneous transformation eMc, between end-effector" << std::endl + << " and camera frame." 
<< std::endl + << std::endl + << " --intrinsic " << std::endl + << " Intrinsic camera parameters obtained after camera calibration." << std::endl + << std::endl + << " --camera-name " << std::endl + << " Name of the camera to consider in the xml file provided for intrinsic camera parameters." + << std::endl + << std::endl + << " --quad-decimate " << std::endl + << " Decimation factor used to detect AprilTag. Default " << opt_quad_decimate << "." << std::endl + << std::endl + << " --tag-size " << std::endl + << " Width in meter or the black part of the AprilTag used as target. Default " << opt_tag_size + << "." << std::endl + << std::endl + << " --adaptive-gain, -a" << std::endl + << " Enable adaptive gain instead of constant gain to speed up convergence. " << std::endl + << std::endl + << " --constant-gain, -g" << std::endl + << " Constant gain value. Default value: " << opt_constant_gain << std::endl + << std::endl + << " --display-image-trajectory, -traj" << std::endl + << " Display the trajectory of the target cog in the image. " << std::endl + << std::endl + << " --plot, -p" << std::endl + << " Enable curve plotter. " << std::endl + << std::endl + << " --task-sequencing" << std::endl + << " Enable task sequencing that allows to smoothly control the velocity of the robot. " << std::endl + << std::endl + << " --no-convergence-threshold" << std::endl + << " Disable ending servoing when it reaches the desired position." << std::endl + << std::endl + << " --verbose, -v" << std::endl + << " Additional printings. " << std::endl + << std::endl + << " --help, -h" << std::endl + << " Print this helper message. " << std::endl + << std::endl; std::cout << "EXAMPLE" << std::endl - << " - How to get network IP" << std::endl + << " - How to get network IP" << std::endl #ifdef _WIN32 - << " $ " << argv[0] << " --portname COM1 --network" << std::endl - << " Try to connect FLIR PTU to port: COM1 with baudrate: 9600" << std::endl + << " $ " << argv[0] << " --portname COM1 --network" << std::endl + << " Try to connect FLIR PTU to port: COM1 with baudrate: 9600" << std::endl #else - << " $ " << argv[0] << " -p /dev/ttyUSB0 --network" << std::endl - << " Try to connect FLIR PTU to port: /dev/ttyUSB0 with baudrate: 9600" << std::endl + << " $ " << argv[0] << " -p /dev/ttyUSB0 --network" << std::endl + << " Try to connect FLIR PTU to port: /dev/ttyUSB0 with baudrate: 9600" << std::endl #endif - << " PTU HostName: PTU-5" << std::endl - << " PTU IP : 169.254.110.254" << std::endl - << " PTU Gateway : 0.0.0.0" << std::endl - << " - How to run this binary using network communication" << std::endl - << " $ " << argv[0] << " --portname tcp:169.254.110.254 --tag-size 0.1 --gain 0.1" << std::endl; + << " PTU HostName: PTU-5" << std::endl + << " PTU IP : 169.254.110.254" << std::endl + << " PTU Gateway : 0.0.0.0" << std::endl + << " - How to run this binary using network communication" << std::endl + << " $ " << argv[0] << " --portname tcp:169.254.110.254 --tag-size 0.1 --gain 0.1" << std::endl; return EXIT_SUCCESS; } @@ -259,24 +274,25 @@ int main(int argc, char **argv) vpPoseVector ePc; ePc.loadYAML(opt_extrinsic, ePc); eMc.buildFrom(ePc); - } else { + } + else { std::cout << "***************************************************************" << std::endl; std::cout << "Warning, use hard coded values for extrinsic camera parameters." 
<< std::endl; std::cout << "Create a yaml file that contains the extrinsic:" << std::endl - << std::endl - << "$ cat eMc.yaml" << std::endl - << "rows: 4" << std::endl - << "cols: 4" << std::endl - << "data:" << std::endl - << " - [0, 0, 1, -0.1]" << std::endl - << " - [-1, 0, 0, -0.123]" << std::endl - << " - [0, -1, 0, 0.035]" << std::endl - << " - [0, 0, 0, 1]" << std::endl - << std::endl - << "and load this file with [--extrinsic // Check if std:c++17 or higher -#if defined(VISP_HAVE_MAVSDK) && ((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) && defined(VISP_HAVE_REALSENSE2) +#if defined(VISP_HAVE_MAVSDK) && ((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) && \ + defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_PUGIXML) #include #include @@ -574,12 +575,13 @@ int main(int argc, char **argv) int main() { #ifndef VISP_HAVE_MAVSDK - std::cout << "\nThis example requires mavsdk library. You should install it, configure and rebuid ViSP.\n" - << std::endl; + std::cout << "\nThis example requires mavsdk library. You should install it, configure and rebuid ViSP.\n" << std::endl; #endif #ifndef VISP_HAVE_REALSENSE2 - std::cout << "\nThis example requires librealsense2 library. You should install it, configure and rebuid ViSP.\n" - << std::endl; + std::cout << "\nThis example requires librealsense2 library. You should install it, configure and rebuid ViSP.\n" << std::endl; +#endif +#if !defined(VISP_HAVE_PUGIXML) + std::cout << "\nThis example requires pugixml built-in 3rdparty." << std::endl; #endif #if !((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) std::cout diff --git a/example/tracking/mbtEdgeKltTracking.cpp b/example/tracking/mbtEdgeKltTracking.cpp index 1cee25a362..8c86255546 100644 --- a/example/tracking/mbtEdgeKltTracking.cpp +++ b/example/tracking/mbtEdgeKltTracking.cpp @@ -269,8 +269,8 @@ int main(int argc, const char **argv) // Read the command line options if (!getOptions(argc, argv, opt_ipath, opt_configFile, opt_modelFile, opt_initFile, opt_lastFrame, displayFeatures, - opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, - useScanline, computeCovariance, projectionError)) { + opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, + useScanline, computeCovariance, projectionError)) { return EXIT_FAILURE; } @@ -393,10 +393,11 @@ int main(int argc, const char **argv) vpCameraParameters cam; // Initialise the tracker: camera parameters, moving edge and KLT settings + +#if defined(VISP_HAVE_PUGIXML) // From the xml file tracker.loadConfigFile(configFile); - -#if 0 +#else // Corresponding parameters manually set to have an example code // By setting the parameters: cam.initPersProjWithoutDistortion(547, 542, 338, 234); @@ -506,9 +507,9 @@ int main(int argc, const char **argv) if (opt_display) vpDisplay::display(I); tracker.resetTracker(); +#if defined(VISP_HAVE_PUGIXML) tracker.loadConfigFile(configFile); - -#if 0 +#else // Corresponding parameters manually set to have an example code // By setting the parameters: cam.initPersProjWithoutDistortion(547, 542, 338, 234); diff --git a/example/tracking/mbtEdgeTracking.cpp b/example/tracking/mbtEdgeTracking.cpp index 5381e916c9..f00940d239 100644 --- a/example/tracking/mbtEdgeTracking.cpp +++ b/example/tracking/mbtEdgeTracking.cpp @@ -269,8 +269,8 @@ int main(int argc, const char **argv) // Read the command line options if (!getOptions(argc, argv, opt_ipath, opt_configFile, 
opt_modelFile, opt_initFile, opt_lastFrame, displayFeatures, - opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, - useScanline, computeCovariance, projectionError)) { + opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, + useScanline, computeCovariance, projectionError)) { return EXIT_FAILURE; } @@ -393,9 +393,10 @@ int main(int argc, const char **argv) // Initialise the tracker: camera parameters, moving edge and KLT settings vpCameraParameters cam; +#if defined(VISP_HAVE_PUGIXML) // From the xml file tracker.loadConfigFile(configFile); -#if 0 +#else // Corresponding parameters manually set to have an example code // By setting the parameters: cam.initPersProjWithoutDistortion(547, 542, 338, 234); @@ -492,8 +493,10 @@ int main(int argc, const char **argv) if (opt_display) vpDisplay::display(I); tracker.resetTracker(); + +#if defined(VISP_HAVE_PUGIXML) tracker.loadConfigFile(configFile); -#if 0 +#else // Corresponding parameters manually set to have an example code // By setting the parameters: cam.initPersProjWithoutDistortion(547, 542, 338, 234); diff --git a/example/tracking/mbtGenericTracking.cpp b/example/tracking/mbtGenericTracking.cpp index 5249b137da..fc69649f08 100644 --- a/example/tracking/mbtGenericTracking.cpp +++ b/example/tracking/mbtGenericTracking.cpp @@ -62,8 +62,6 @@ #define GETOPTARGS "x:m:i:n:de:chtfColwvpT:" -#define USE_XML 0 - void usage(const char *name, const char *badparam) { #if VISP_HAVE_DATASET_VERSION >= 0x030600 @@ -276,8 +274,8 @@ int main(int argc, const char **argv) // Read the command line options if (!getOptions(argc, argv, opt_ipath, opt_configFile, opt_modelFile, opt_initFile, opt_lastFrame, displayFeatures, - opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, - useScanline, computeCovariance, projectionError, trackerType)) { + opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, + useScanline, computeCovariance, projectionError, trackerType)) { return EXIT_FAILURE; } @@ -299,7 +297,7 @@ int main(int argc, const char **argv) else ipath = vpIoTools::createFilePath(env_ipath, "mbt/cube/image%04d." 
+ ext); -#if USE_XML +#if defined(VISP_HAVE_PUGIXML) std::string configFile; if (!opt_configFile.empty()) configFile = opt_configFile; @@ -411,7 +409,7 @@ int main(int argc, const char **argv) vpCameraParameters cam1, cam2; // Initialise the tracker: camera parameters, moving edge and KLT settings -#if USE_XML +#if defined(VISP_HAVE_PUGIXML) // From the xml file dynamic_cast(tracker)->loadConfigFile(configFile, configFile); #else @@ -541,7 +539,7 @@ int main(int argc, const char **argv) } tracker->resetTracker(); -#if USE_XML +#if defined(VISP_HAVE_PUGIXML) dynamic_cast(tracker)->loadConfigFile(configFile, configFile); #else // By setting the parameters: diff --git a/example/tracking/mbtGenericTracking2.cpp b/example/tracking/mbtGenericTracking2.cpp index 357dc9cf56..184e2d4ef9 100644 --- a/example/tracking/mbtGenericTracking2.cpp +++ b/example/tracking/mbtGenericTracking2.cpp @@ -62,8 +62,6 @@ #define GETOPTARGS "x:m:i:n:de:chtfColwvpT:" -#define USE_XML 0 - void usage(const char *name, const char *badparam) { #if VISP_HAVE_DATASET_VERSION >= 0x030600 @@ -276,8 +274,8 @@ int main(int argc, const char **argv) // Read the command line options if (!getOptions(argc, argv, opt_ipath, opt_configFile, opt_modelFile, opt_initFile, opt_lastFrame, displayFeatures, - opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, - useScanline, computeCovariance, projectionError, trackerType)) { + opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog, + useScanline, computeCovariance, projectionError, trackerType)) { return EXIT_FAILURE; } @@ -299,7 +297,7 @@ int main(int argc, const char **argv) else ipath = vpIoTools::createFilePath(env_ipath, "mbt/cube/image%04d." + ext); -#if USE_XML +#if defined(VISP_HAVE_PUGIXML) std::string configFile; if (!opt_configFile.empty()) configFile = opt_configFile; @@ -425,7 +423,7 @@ int main(int argc, const char **argv) std::map mapOfCameraParams; // Initialise the tracker: camera parameters, moving edge and KLT settings -#if USE_XML +#if defined(VISP_HAVE_PUGIXML) // From the xml file std::map mapOfConfigFiles; mapOfConfigFiles["Camera1"] = configFile; @@ -592,7 +590,7 @@ int main(int argc, const char **argv) } tracker->resetTracker(); -#if USE_XML +#if defined(VISP_HAVE_PUGIXML) dynamic_cast(tracker)->loadConfigFile(mapOfConfigFiles); #else // By setting the parameters: diff --git a/example/tracking/mbtGenericTrackingDepth.cpp b/example/tracking/mbtGenericTrackingDepth.cpp index bad1cf3f8c..87d4f67d69 100644 --- a/example/tracking/mbtGenericTrackingDepth.cpp +++ b/example/tracking/mbtGenericTrackingDepth.cpp @@ -62,7 +62,6 @@ #define GETOPTARGS "x:X:m:M:i:n:dchfolwvpt:T:e:" -#define USE_XML 1 #define USE_SMALL_DATASET 1 // small depth dataset in ViSP-images namespace @@ -356,17 +355,17 @@ bool read_data(unsigned int cpt, const std::string &input_directory, vpImage(tracker)->loadConfigFile(configFile, configFile_depth); #else diff --git a/example/tracking/mbtGenericTrackingDepthOnly.cpp b/example/tracking/mbtGenericTrackingDepthOnly.cpp index 36228e84ca..b05d7e54c6 100644 --- a/example/tracking/mbtGenericTrackingDepthOnly.cpp +++ b/example/tracking/mbtGenericTrackingDepthOnly.cpp @@ -62,7 +62,6 @@ #define GETOPTARGS "X:M:i:n:dchfolwvpT:e:u:" -#define USE_XML 1 #define USE_SMALL_DATASET 1 // small depth dataset in ViSP-images namespace @@ -222,7 +221,8 @@ bool getOptions(int argc, const char **argv, std::string &ipath, std::string &co return true; } -struct vpRealsenseIntrinsics_t { +struct 
vpRealsenseIntrinsics_t +{ float ppx; /**< Horizontal coordinate of the principal point of the image, as a pixel offset from the left edge */ float ppy; /**< Vertical coordinate of the principal point of the image, as @@ -325,7 +325,7 @@ bool read_data(unsigned int cpt, const std::string &input_directory, vpImage(tracker)->loadConfigFile(configFile_depth); #else @@ -448,9 +448,9 @@ int main(int argc, const char **argv) usage(argv[0], nullptr); std::cerr << std::endl << "ERROR:" << std::endl; std::cerr << " Use -i option or set VISP_INPUT_IMAGE_PATH " << std::endl - << " environment variable to specify the location of the " << std::endl - << " image path where test images are located." << std::endl - << std::endl; + << " environment variable to specify the location of the " << std::endl + << " image path where test images are located." << std::endl + << std::endl; return EXIT_FAILURE; } @@ -469,26 +469,26 @@ int main(int argc, const char **argv) configFile_depth = opt_configFile_depth; else configFile_depth = - vpIoTools::createFilePath(!opt_ipath.empty() ? opt_ipath : env_ipath, "mbt-depth/castel/chateau_depth.xml"); + vpIoTools::createFilePath(!opt_ipath.empty() ? opt_ipath : env_ipath, "mbt-depth/castel/chateau_depth.xml"); std::string modelFile_depth; if (!opt_modelFile_depth.empty()) modelFile_depth = opt_modelFile_depth; else modelFile_depth = - vpIoTools::createFilePath(!opt_ipath.empty() ? opt_ipath : env_ipath, "mbt-depth/castel/chateau.cao"); + vpIoTools::createFilePath(!opt_ipath.empty() ? opt_ipath : env_ipath, "mbt-depth/castel/chateau.cao"); std::string vrml_ext = ".wrl"; bool use_vrml = - (modelFile_depth.compare(modelFile_depth.length() - vrml_ext.length(), vrml_ext.length(), vrml_ext) == 0); + (modelFile_depth.compare(modelFile_depth.length() - vrml_ext.length(), vrml_ext.length(), vrml_ext) == 0); if (use_vrml) { #if defined(VISP_HAVE_COIN3D) && (COIN_MAJOR_VERSION == 2 || COIN_MAJOR_VERSION == 3 || COIN_MAJOR_VERSION == 4) std::cout << "use_vrml: " << use_vrml << std::endl; #else std::cerr << "Error: vrml model file is only supported if ViSP is " - "build with Coin3D 3rd party" - << std::endl; + "build with Coin3D 3rd party" + << std::endl; return EXIT_FAILURE; #endif } @@ -572,7 +572,7 @@ int main(int argc, const char **argv) cam_color.initPersProjWithoutDistortion(615.1674804688, 615.1675415039, 312.1889953613, 243.4373779297); vpHomogeneousMatrix depth_M_color; std::string depth_M_color_filename = - vpIoTools::createFilePath(!opt_ipath.empty() ? opt_ipath : env_ipath, "mbt-depth/castel/depth_M_color.txt"); + vpIoTools::createFilePath(!opt_ipath.empty() ? 
opt_ipath : env_ipath, "mbt-depth/castel/depth_M_color.txt"); { std::ifstream depth_M_color_file(depth_M_color_filename.c_str()); depth_M_color.load(depth_M_color_file); @@ -596,7 +596,8 @@ int main(int argc, const char **argv) dynamic_cast(tracker)->getPose(cMo); // display the 3D model at the given pose dynamic_cast(tracker)->display(I_depth, cMo, cam, vpColor::red); - } else { + } + else { vpHomogeneousMatrix cMoi(0.04431452054, 0.09294637757, 0.3357760654, -2.677922443, 0.121297639, -0.6028463357); dynamic_cast(tracker)->initFromPose(I_depth, cMoi); } @@ -757,12 +758,12 @@ int main(int argc, const char **argv) } frame_index++; - } + } std::cout << "\nFinal poses, cMo:\n" << cMo << std::endl; std::cout << "\nComputation time, Mean: " << vpMath::getMean(time_vec) - << " ms ; Median: " << vpMath::getMedian(time_vec) << " ms ; Std: " << vpMath::getStdev(time_vec) << " ms" - << std::endl; + << " ms ; Median: " << vpMath::getMedian(time_vec) << " ms ; Std: " << vpMath::getStdev(time_vec) << " ms" + << std::endl; if (opt_click_allowed && !quit) { vpDisplay::getClick(I_depth); @@ -772,11 +773,12 @@ int main(int argc, const char **argv) tracker = nullptr; return EXIT_SUCCESS; - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cout << "Catch an exception: " << e << std::endl; return EXIT_FAILURE; } -} + } #elif !(defined(VISP_HAVE_MODULE_MBT) && defined(VISP_HAVE_DISPLAY)) int main() diff --git a/example/tracking/mbtKltTracking.cpp b/example/tracking/mbtKltTracking.cpp index 8c6ba3f3d7..d9f1cf4070 100644 --- a/example/tracking/mbtKltTracking.cpp +++ b/example/tracking/mbtKltTracking.cpp @@ -261,9 +261,9 @@ int main(int argc, const char **argv) usage(argv[0], nullptr); std::cerr << std::endl << "ERROR:" << std::endl; std::cerr << " Use -i option or set VISP_INPUT_IMAGE_PATH " << std::endl - << " environment variable to specify the location of the " << std::endl - << " image path where test images are located." << std::endl - << std::endl; + << " environment variable to specify the location of the " << std::endl + << " image path where test images are located." << std::endl + << std::endl; return EXIT_FAILURE; } @@ -283,14 +283,16 @@ int main(int argc, const char **argv) if (!opt_modelFile.empty()) { modelFile = opt_modelFile; - } else { + } + else { std::string modelFileCao = "mbt/cube.cao"; std::string modelFileWrl = "mbt/cube.wrl"; if (!opt_ipath.empty()) { if (cao3DModel) { modelFile = vpIoTools::createFilePath(opt_ipath, modelFileCao); - } else { + } + else { #ifdef VISP_HAVE_COIN3D modelFile = vpIoTools::createFilePath(opt_ipath, modelFileWrl); #else @@ -298,10 +300,12 @@ int main(int argc, const char **argv) modelFile = vpIoTools::createFilePath(opt_ipath, modelFileCao); #endif } - } else { + } + else { if (cao3DModel) { modelFile = vpIoTools::createFilePath(env_ipath, modelFileCao); - } else { + } + else { #ifdef VISP_HAVE_COIN3D modelFile = vpIoTools::createFilePath(env_ipath, modelFileWrl); #else @@ -325,7 +329,8 @@ int main(int argc, const char **argv) reader.setFileName(ipath); try { reader.open(I); - } catch (...) { + } + catch (...) 
{ std::cout << "Cannot open sequence: " << ipath << std::endl; return EXIT_FAILURE; } @@ -362,9 +367,11 @@ int main(int argc, const char **argv) // Load tracker config file (camera parameters and moving edge settings) vpCameraParameters cam; + +#if defined(VISP_HAVE_PUGIXML) // From the xml file tracker.loadConfigFile(configFile); -#if 0 +#else // Corresponding parameters manually set to have an example code // By setting the parameters: cam.initPersProjWithoutDistortion(547, 542, 338, 234); @@ -433,7 +440,8 @@ int main(int argc, const char **argv) tracker.getPose(cMo); // display the 3D model at the given pose tracker.display(I, cMo, cam, vpColor::red); - } else { + } + else { vpHomogeneousMatrix cMoi(0.02044769891, 0.1101505452, 0.5078963719, 2.063603907, 1.110231561, -0.4392789872); tracker.initFromPose(I, cMoi); } @@ -458,9 +466,11 @@ int main(int argc, const char **argv) if (opt_display) vpDisplay::display(I); tracker.resetTracker(); + +#if defined(VISP_HAVE_PUGIXML) tracker.loadConfigFile(configFile); -#if 0 - // Corresponding parameters manually set to have an example code +#else + // Corresponding parameters manually set to have an example code // By setting the parameters: cam.initPersProjWithoutDistortion(547, 542, 338, 234); @@ -550,7 +560,8 @@ int main(int argc, const char **argv) reader.close(); return EXIT_SUCCESS; - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cout << "Catch an exception: " << e << std::endl; return EXIT_FAILURE; } @@ -561,8 +572,8 @@ int main(int argc, const char **argv) int main() { std::cout << "visp_mbt, visp_gui modules and OpenCV are required to run " - "this example." - << std::endl; + "this example." + << std::endl; return EXIT_SUCCESS; } diff --git a/modules/core/CMakeLists.txt b/modules/core/CMakeLists.txt index 63e6f3c688..2d766d6700 100644 --- a/modules/core/CMakeLists.txt +++ b/modules/core/CMakeLists.txt @@ -38,9 +38,12 @@ # Add optional 3rd parties set(opt_incs "") set(opt_libs "") +set(opt_libs_private "") -# Add private libraries -set(PRIVATE_LIBRARIES ${LAPACK_LIBRARIES} ${PUGIXML_LIBRARIES}) +if(WITH_LAPACK) + # lapack is private + set(opt_libs_private ${LAPACK_LIBRARIES}) +endif() # Add library ws2_32.a or ws2_32.lib for vpNetwork class if(WS2_32_FOUND) @@ -196,7 +199,7 @@ if(USE_YARP) add_definitions(${YARP_DEFINES}) endif(USE_YARP) -# Math: eigen3, gsl, lapack, OpenCV +# Math: eigen3, gsl, mkl, openblas, atlas, netlib, OpenCV if(USE_EIGEN3) if(EIGEN3_INCLUDE_DIRS) list(APPEND opt_incs ${EIGEN3_INCLUDE_DIRS}) @@ -204,22 +207,27 @@ if(USE_EIGEN3) list(APPEND opt_incs ${EIGEN3_INCLUDE_DIR}) endif() endif() + if(USE_GSL) list(APPEND opt_incs ${GSL_INCLUDE_DIRS}) list(APPEND opt_libs ${GSL_LIBRARIES}) endif() + if(USE_MKL) list(APPEND opt_incs ${MKL_INCLUDE_DIRS}) list(APPEND opt_libs ${MKL_LIBRARIES}) endif() + if(USE_OPENBLAS) list(APPEND opt_incs ${OpenBLAS_INCLUDE_DIR}) list(APPEND opt_libs ${OpenBLAS_LIBRARIES}) endif() + if(USE_ATLAS) list(APPEND opt_incs ${Atlas_INCLUDE_DIR}) list(APPEND opt_libs ${Atlas_LIBRARIES}) endif() + if(USE_NETLIB) list(APPEND opt_libs ${NETLIB_LIBRARIES}) endif() @@ -229,15 +237,18 @@ if(USE_XML2) list(APPEND opt_incs ${XML2_INCLUDE_DIRS}) list(APPEND opt_libs ${XML2_LIBRARIES}) endif() + if(USE_THREADS) if(CMAKE_THREAD_LIBS_INIT) list(APPEND opt_libs "${CMAKE_THREAD_LIBS_INIT}") endif() endif() + if(USE_ZLIB) list(APPEND opt_incs ${ZLIB_INCLUDE_DIRS}) list(APPEND opt_libs ${ZLIB_LIBRARIES}) endif() + if(USE_OPENMP) list(APPEND opt_incs ${OpenMP_CXX_INCLUDE_DIRS}) # Because 
there is an explicit link to libpthread location that breaks visp conda package usage on linux @@ -254,14 +265,17 @@ if(USE_OPENMP) endif() endforeach() endif() + if(USE_NLOHMANN_JSON) get_target_property(_inc_dirs "nlohmann_json::nlohmann_json" INTERFACE_INCLUDE_DIRECTORIES) list(APPEND opt_incs ${_inc_dirs}) endif() -# pugixml is always enabled to provide default XML I/O capabilities -# pugixml is private -include_directories(${PUGIXML_INCLUDE_DIRS}) +if(WITH_PUGIXML) + # pugixml is private and provides default XML I/O capabilities + include_directories(${PUGIXML_INCLUDE_DIRS}) + list(APPEND opt_libs_private ${PUGIXML_LIBRARIES}) +endif() if(WITH_CATCH2) # catch2 is private @@ -271,7 +285,7 @@ endif() if(WITH_SIMDLIB) # Simd lib is private include_directories(${SIMDLIB_INCLUDE_DIRS}) - list(APPEND PRIVATE_LIBRARIES ${SIMDLIB_LIBRARIES}) + list(APPEND opt_libs_private ${SIMDLIB_LIBRARIES}) endif() if(MSVC) @@ -374,7 +388,7 @@ if(USE_XML2) endif() endif() -vp_add_module(core PRIVATE_OPTIONAL ${PRIVATE_LIBRARIES} WRAP java) +vp_add_module(core PRIVATE_OPTIONAL ${opt_libs_private} WRAP java) #----------------------------------------------------------------------------- # Enable large file support diff --git a/modules/core/include/visp3/core/vpXmlParserCamera.h b/modules/core/include/visp3/core/vpXmlParserCamera.h index 838a5f378d..565f739b9e 100644 --- a/modules/core/include/visp3/core/vpXmlParserCamera.h +++ b/modules/core/include/visp3/core/vpXmlParserCamera.h @@ -43,6 +43,7 @@ #include +#if defined(VISP_HAVE_PUGIXML) #include /*! @@ -198,3 +199,4 @@ class VISP_EXPORT vpXmlParserCamera Impl *m_impl; }; #endif +#endif diff --git a/modules/core/include/visp3/core/vpXmlParserHomogeneousMatrix.h b/modules/core/include/visp3/core/vpXmlParserHomogeneousMatrix.h index 567c8fb631..d8243259ee 100644 --- a/modules/core/include/visp3/core/vpXmlParserHomogeneousMatrix.h +++ b/modules/core/include/visp3/core/vpXmlParserHomogeneousMatrix.h @@ -44,6 +44,7 @@ #include +#if defined(VISP_HAVE_PUGIXML) #include /*! @@ -171,3 +172,4 @@ class VISP_EXPORT vpXmlParserHomogeneousMatrix Impl *m_impl; }; #endif +#endif diff --git a/modules/core/include/visp3/core/vpXmlParserRectOriented.h b/modules/core/include/visp3/core/vpXmlParserRectOriented.h index ce1073efe4..52dcff8e69 100644 --- a/modules/core/include/visp3/core/vpXmlParserRectOriented.h +++ b/modules/core/include/visp3/core/vpXmlParserRectOriented.h @@ -42,6 +42,7 @@ #include +#if defined(VISP_HAVE_PUGIXML) #include /*! 
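Since the XML parser classes above are now compiled only when pugixml support is enabled, application code that loads intrinsics from XML should test the same VISP_HAVE_PUGIXML macro. A minimal sketch of that pattern (the file name "camera.xml" and camera name "Camera" are only illustrative):

  #include <visp3/core/vpConfig.h>
  #include <visp3/core/vpCameraParameters.h>
  #if defined(VISP_HAVE_PUGIXML)
  #include <visp3/core/vpXmlParserCamera.h>
  #endif

  int main()
  {
    vpCameraParameters cam;
  #if defined(VISP_HAVE_PUGIXML)
    // Load the intrinsics from an XML file when pugixml support is built in
    vpXmlParserCamera parser;
    parser.parse(cam, "camera.xml", "Camera",
                 vpCameraParameters::perspectiveProjWithoutDistortion, 640, 480);
  #else
    // Fallback when ViSP is configured with -DWITH_PUGIXML=OFF:
    // set the parameters directly in the code
    cam.initPersProjWithoutDistortion(600, 600, 320, 240);
  #endif
    return 0;
  }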
@@ -107,4 +108,5 @@ class VISP_EXPORT vpXmlParserRectOriented class Impl; Impl *m_impl; }; +#endif #endif // vpXmlParserRectOriented_h diff --git a/modules/core/src/camera/vpXmlParserCamera.cpp b/modules/core/src/camera/vpXmlParserCamera.cpp index 642b012881..034bb0ec17 100644 --- a/modules/core/src/camera/vpXmlParserCamera.cpp +++ b/modules/core/src/camera/vpXmlParserCamera.cpp @@ -41,6 +41,7 @@ #include +#if defined(VISP_HAVE_PUGIXML) #include #include @@ -1226,3 +1227,9 @@ void vpXmlParserCamera::setSubsampling_width(unsigned int subsampling) { m_impl- void vpXmlParserCamera::setSubsampling_height(unsigned int subsampling) { m_impl->setSubsampling_height(subsampling); } void vpXmlParserCamera::setWidth(unsigned int width) { m_impl->setWidth(width); } + +#elif !defined(VISP_BUILD_SHARED_LIBS) +// Work around to avoid warning: libvisp_core.a(vpXmlParserCamera.cpp.o) has no symbols +void dummy_vpXmlParserCamera() { }; + +#endif diff --git a/modules/core/src/math/transformation/vpXmlParserHomogeneousMatrix.cpp b/modules/core/src/math/transformation/vpXmlParserHomogeneousMatrix.cpp index 66ded5db86..10a23d4fb7 100644 --- a/modules/core/src/math/transformation/vpXmlParserHomogeneousMatrix.cpp +++ b/modules/core/src/math/transformation/vpXmlParserHomogeneousMatrix.cpp @@ -44,6 +44,7 @@ */ #include +#if defined(VISP_HAVE_PUGIXML) #include /* ----------------------------- LABEL XML ----------------------------- */ @@ -67,7 +68,8 @@ class vpXmlParserHomogeneousMatrix::Impl private: /* --- XML Code------------------------------------------------------------ */ - enum vpXmlCodeType { + enum vpXmlCodeType + { CODE_XML_BAD = -1, CODE_XML_OTHER, CODE_XML_M, @@ -82,7 +84,7 @@ class vpXmlParserHomogeneousMatrix::Impl }; public: - Impl() : m_M(), m_name() {} + Impl() : m_M(), m_name() { } int parse(vpHomogeneousMatrix &M, const std::string &filename, const std::string &name) { @@ -132,18 +134,20 @@ class vpXmlParserHomogeneousMatrix::Impl if (prop == CODE_XML_M) { if (SEQUENCE_OK == read_matrix(node, name)) nbM++; - } else + } + else back = SEQUENCE_ERROR; } if (nbM == 0) { back = SEQUENCE_ERROR; std::cerr << "No Homogeneous matrix is available" << std::endl << "with name: " << name << std::endl; - } else if (nbM > 1) { + } + else if (nbM > 1) { back = SEQUENCE_ERROR; std::cerr << nbM << " There are more Homogeneous matrix" << std::endl - << "with the same name : " << std::endl - << "precise your choice..." << std::endl; + << "with the same name : " << std::endl + << "precise your choice..." << std::endl; } return back; @@ -206,7 +210,8 @@ class vpXmlParserHomogeneousMatrix::Impl if (!(name == M_name_tmp)) { back = SEQUENCE_ERROR; - } else { + } + else { this->m_M = M_tmp; // std::cout << "Convert in Homogeneous Matrix:"<< std::endl; // std::cout << this-> M << std::endl; @@ -335,8 +340,8 @@ class vpXmlParserHomogeneousMatrix::Impl if (M_isFound) { std::cout << "There is already an homogeneous matrix " << std::endl - << "available in the file with the input name: " << name << "." << std::endl - << "Please delete it manually from the xml file." << std::endl; + << "available in the file with the input name: " << name << "." << std::endl + << "Please delete it manually from the xml file." 
<< std::endl; return SEQUENCE_ERROR; } @@ -459,23 +464,32 @@ class vpXmlParserHomogeneousMatrix::Impl if (!strcmp(str, LABEL_XML_M)) { val_int = CODE_XML_M; - } else if (!strcmp(str, LABEL_XML_M_NAME)) { + } + else if (!strcmp(str, LABEL_XML_M_NAME)) { val_int = CODE_XML_M_NAME; - } else if (!strcmp(str, LABEL_XML_VALUE)) { + } + else if (!strcmp(str, LABEL_XML_VALUE)) { val_int = CODE_XML_VALUE; - } else if (!strcmp(str, LABEL_XML_TX)) { + } + else if (!strcmp(str, LABEL_XML_TX)) { val_int = CODE_XML_TX; - } else if (!strcmp(str, LABEL_XML_TY)) { + } + else if (!strcmp(str, LABEL_XML_TY)) { val_int = CODE_XML_TY; - } else if (!strcmp(str, LABEL_XML_TZ)) { + } + else if (!strcmp(str, LABEL_XML_TZ)) { val_int = CODE_XML_TZ; - } else if (!strcmp(str, LABEL_XML_TUX)) { + } + else if (!strcmp(str, LABEL_XML_TUX)) { val_int = CODE_XML_TUX; - } else if (!strcmp(str, LABEL_XML_TUY)) { + } + else if (!strcmp(str, LABEL_XML_TUY)) { val_int = CODE_XML_TUY; - } else if (!strcmp(str, LABEL_XML_TUZ)) { + } + else if (!strcmp(str, LABEL_XML_TUZ)) { val_int = CODE_XML_TUZ; - } else { + } + else { val_int = CODE_XML_OTHER; } res = val_int; @@ -494,7 +508,7 @@ class vpXmlParserHomogeneousMatrix::Impl }; #endif // DOXYGEN_SHOULD_SKIP_THIS -vpXmlParserHomogeneousMatrix::vpXmlParserHomogeneousMatrix() : m_impl(new Impl()) {} +vpXmlParserHomogeneousMatrix::vpXmlParserHomogeneousMatrix() : m_impl(new Impl()) { } vpXmlParserHomogeneousMatrix::~vpXmlParserHomogeneousMatrix() { delete m_impl; } @@ -539,3 +553,9 @@ void vpXmlParserHomogeneousMatrix::setHomogeneousMatrixName(const std::string &n { m_impl->setHomogeneousMatrixName(name); } + +#elif !defined(VISP_BUILD_SHARED_LIBS) +// Work around to avoid warning: libvisp_core.a(vpXmlParserHomogeneousMatrix.cpp.o) has no symbols +void dummy_vpXmlParserHomogeneousMatrix() { }; + +#endif diff --git a/modules/core/src/tools/geometry/vpXmlParserRectOriented.cpp b/modules/core/src/tools/geometry/vpXmlParserRectOriented.cpp index 2679726ce0..2822e9ef75 100644 --- a/modules/core/src/tools/geometry/vpXmlParserRectOriented.cpp +++ b/modules/core/src/tools/geometry/vpXmlParserRectOriented.cpp @@ -44,14 +44,18 @@ #include #include + +#if defined(VISP_HAVE_PUGIXML) #include + #include #ifndef DOXYGEN_SHOULD_SKIP_THIS class vpXmlParserRectOriented::Impl { private: - enum vpXmlCodeType { + enum vpXmlCodeType + { CODE_XML_BAD = -1, CODE_XML_OTHER, CODE_XML_CENTER_I, @@ -127,7 +131,8 @@ class vpXmlParserRectOriented::Impl root_node = doc.append_child(pugi::node_declaration); root_node.append_attribute("version") = "1.0"; root_node = doc.append_child("config"); - } else if (!append) { + } + else if (!append) { if (!vpIoTools::remove(filename)) throw vpException(vpException::ioError, "Cannot remove existing xml file"); @@ -168,7 +173,7 @@ class vpXmlParserRectOriented::Impl }; #endif // DOXYGEN_SHOULD_SKIP_THIS -vpXmlParserRectOriented::vpXmlParserRectOriented() : m_impl(new Impl()) {} +vpXmlParserRectOriented::vpXmlParserRectOriented() : m_impl(new Impl()) { } vpXmlParserRectOriented::~vpXmlParserRectOriented() { delete m_impl; } @@ -197,3 +202,9 @@ void vpXmlParserRectOriented::save(const std::string &filename, bool append) { m vpRectOriented vpXmlParserRectOriented::getRectangle() const { return m_impl->getRectangle(); } void vpXmlParserRectOriented::setRectangle(const vpRectOriented &rectangle) { m_impl->setRectangle(rectangle); } + +#elif !defined(VISP_BUILD_SHARED_LIBS) +// Work around to avoid warning: libvisp_core.a(vpXmlParserRectOriented.cpp.o) has no symbols +void 
dummy_vpXmlParserRectOriented() { }; + +#endif diff --git a/modules/core/test/camera/testXmlParserCamera.cpp b/modules/core/test/camera/testXmlParserCamera.cpp index 10e1202de3..f179ef151c 100644 --- a/modules/core/test/camera/testXmlParserCamera.cpp +++ b/modules/core/test/camera/testXmlParserCamera.cpp @@ -44,6 +44,8 @@ int main() { +#if defined(VISP_HAVE_PUGIXML) + #if defined(_WIN32) std::string tmp_dir = "C:/temp/"; #else @@ -266,6 +268,7 @@ int main() } vpIoTools::remove(tmp_dir); +#endif return EXIT_SUCCESS; } diff --git a/modules/core/test/image-with-dataset/testImageWarp.cpp b/modules/core/test/image-with-dataset/testImageWarp.cpp index eba7b093fc..3bebc67edb 100644 --- a/modules/core/test/image-with-dataset/testImageWarp.cpp +++ b/modules/core/test/image-with-dataset/testImageWarp.cpp @@ -58,10 +58,10 @@ static const double g_threshold_percentage_bilinear = 0.75; static const double g_threshold_percentage_pers = 0.75; static const double g_threshold_percentage_pers_bilinear = 0.65; -static const std::vector interp_methods = {vpImageTools::INTERPOLATION_NEAREST, - vpImageTools::INTERPOLATION_LINEAR}; -static const std::vector interp_names = {"Nearest Neighbor", "Bilinear"}; -static const std::vector suffixes = {"_NN.png", "_bilinear.png"}; +static const std::vector interp_methods = { vpImageTools::INTERPOLATION_NEAREST, + vpImageTools::INTERPOLATION_LINEAR }; +static const std::vector interp_names = { "Nearest Neighbor", "Bilinear" }; +static const std::vector suffixes = { "_NN.png", "_bilinear.png" }; bool almostEqual(const vpImage &I1, const vpImage &I2, double threshold_val, double threshold_percentage, double &percentage) @@ -180,6 +180,7 @@ TEST_CASE("Affine warp on grayscale", "[warp_image]") vpIoTools::getViSPImagesDataPath(), std::string("warp/cv_warp_affine_rot_45_gray" + suffixes[i])); REQUIRE(vpIoTools::checkFilename(refImgPath)); vpImage I_ref_opencv; + vpImageIo::read(I_ref_opencv, refImgPath); vpImage I_affine; @@ -548,10 +549,10 @@ TEST_CASE("Perspective warp on grayscale", "[warp_image]") double percentage = 0.0; bool equal = - almostEqual(I_ref, I_perspective, g_threshold_value, - (i == 0) ? g_threshold_percentage_pers : g_threshold_percentage_pers_bilinear, percentage); + almostEqual(I_ref, I_perspective, g_threshold_value, + (i == 0) ? g_threshold_percentage_pers : g_threshold_percentage_pers_bilinear, percentage); std::cout << "Percentage valid pixels (Homography " << interp_names[i] << " Ref): " << percentage - << std::endl; + << std::endl; CHECK(equal); } @@ -568,10 +569,10 @@ TEST_CASE("Perspective warp on grayscale", "[warp_image]") double percentage = 0.0; bool equal = - almostEqual(I_ref_opencv, I_perspective, g_threshold_value, - (i == 0) ? g_threshold_percentage_pers : g_threshold_percentage_pers_bilinear, percentage); + almostEqual(I_ref_opencv, I_perspective, g_threshold_value, + (i == 0) ? g_threshold_percentage_pers : g_threshold_percentage_pers_bilinear, percentage); std::cout << "Percentage valid pixels (Homography " << interp_names[i] << " OpenCV): " << percentage - << std::endl; + << std::endl; CHECK(equal); } @@ -588,10 +589,10 @@ TEST_CASE("Perspective warp on grayscale", "[warp_image]") double percentage = 0.0; bool equal = - almostEqual(I_ref_pil, I_perspective, g_threshold_value, - (i == 0) ? g_threshold_percentage_pers : g_threshold_percentage_pers_bilinear, percentage); + almostEqual(I_ref_pil, I_perspective, g_threshold_value, + (i == 0) ? 
g_threshold_percentage_pers : g_threshold_percentage_pers_bilinear, percentage); std::cout << "Percentage valid pixels (Homography " << interp_names[i] << " PIL): " << percentage - << std::endl; + << std::endl; CHECK(equal); } } @@ -660,10 +661,10 @@ TEST_CASE("Perspective warp on color", "[warp_image]") double percentage = 0.0; bool equal = - almostEqual(I_ref, I_perspective, g_threshold_value, - (i == 0) ? g_threshold_percentage_pers : g_threshold_percentage_pers_bilinear, percentage); + almostEqual(I_ref, I_perspective, g_threshold_value, + (i == 0) ? g_threshold_percentage_pers : g_threshold_percentage_pers_bilinear, percentage); std::cout << "Percentage valid pixels (Homography " << interp_names[i] << " Ref): " << percentage - << std::endl; + << std::endl; CHECK(equal); } @@ -680,10 +681,10 @@ TEST_CASE("Perspective warp on color", "[warp_image]") double percentage = 0.0; bool equal = - almostEqual(I_ref_opencv, I_perspective, g_threshold_value, - (i == 0) ? g_threshold_percentage_pers : g_threshold_percentage_pers_bilinear, percentage); + almostEqual(I_ref_opencv, I_perspective, g_threshold_value, + (i == 0) ? g_threshold_percentage_pers : g_threshold_percentage_pers_bilinear, percentage); std::cout << "Percentage valid pixels (Homography " << interp_names[i] << " OpenCV): " << percentage - << std::endl; + << std::endl; CHECK(equal); } @@ -700,10 +701,10 @@ TEST_CASE("Perspective warp on color", "[warp_image]") double percentage = 0.0; bool equal = - almostEqual(I_ref_pil, I_perspective, g_threshold_value, - (i == 0) ? g_threshold_percentage_pers : g_threshold_percentage_pers_bilinear, percentage); + almostEqual(I_ref_pil, I_perspective, g_threshold_value, + (i == 0) ? g_threshold_percentage_pers : g_threshold_percentage_pers_bilinear, percentage); std::cout << "Percentage valid pixels (Homography " << interp_names[i] << " PIL): " << percentage - << std::endl; + << std::endl; CHECK(equal); } } diff --git a/modules/core/test/math/testXmlParserHomogeneousMatrix.cpp b/modules/core/test/math/testXmlParserHomogeneousMatrix.cpp index 4f35ee6966..ffc21d3a3d 100644 --- a/modules/core/test/math/testXmlParserHomogeneousMatrix.cpp +++ b/modules/core/test/math/testXmlParserHomogeneousMatrix.cpp @@ -44,6 +44,7 @@ int main() { +#if defined(VISP_HAVE_PUGIXML) #if defined(_WIN32) std::string tmp_dir = "C:/temp/"; #else @@ -89,6 +90,7 @@ int main() } vpIoTools::remove(tmp_dir); +#endif return EXIT_SUCCESS; } diff --git a/modules/core/test/tools/geometry/testXmlParserRectOriented.cpp b/modules/core/test/tools/geometry/testXmlParserRectOriented.cpp index e2d75f902f..17b5c4b7e5 100644 --- a/modules/core/test/tools/geometry/testXmlParserRectOriented.cpp +++ b/modules/core/test/tools/geometry/testXmlParserRectOriented.cpp @@ -44,6 +44,7 @@ int main() { +#if defined(VISP_HAVE_PUGIXML) #if defined(_WIN32) std::string tmp_dir = "C:/temp/"; #else @@ -85,6 +86,7 @@ int main() } vpIoTools::remove(tmp_dir); +#endif return EXIT_SUCCESS; } diff --git a/modules/io/CMakeLists.txt b/modules/io/CMakeLists.txt index 0c0e900226..f93ece86d2 100644 --- a/modules/io/CMakeLists.txt +++ b/modules/io/CMakeLists.txt @@ -57,8 +57,10 @@ if(USE_PNG) add_definitions(${PNG_DEFINITIONS}) endif() -# stb_image is private -include_directories(${STBIMAGE_INCLUDE_DIRS}) +if(WITH_STBIMAGE) + # stb_image is private + include_directories(${STBIMAGE_INCLUDE_DIRS}) +endif() if(WITH_CATCH2) # catch2 is private @@ -70,10 +72,11 @@ if(USE_NLOHMANN_JSON) list(APPEND opt_incs ${_inc_dirs}) endif() -# simdlib is always enabled since it contains 
fallback code to plain C++ code -# Simd lib is private -include_directories(${SIMDLIB_INCLUDE_DIRS}) -list(APPEND opt_libs_private ${SIMDLIB_LIBRARIES}) +if(WITH_SIMDLIB) + # Simd lib is private + include_directories(${SIMDLIB_INCLUDE_DIRS}) + list(APPEND opt_libs_private ${SIMDLIB_LIBRARIES}) +endif() # TinyEXR lib is private include_directories(${TINYEXR_INCLUDE_DIRS}) diff --git a/modules/io/include/visp3/io/vpImageIo.h b/modules/io/include/visp3/io/vpImageIo.h index bf4eb446d0..49be1de4a8 100644 --- a/modules/io/include/visp3/io/vpImageIo.h +++ b/modules/io/include/visp3/io/vpImageIo.h @@ -98,7 +98,8 @@ int main() class VISP_EXPORT vpImageIo { private: - typedef enum { + typedef enum + { FORMAT_PGM, FORMAT_PPM, FORMAT_JPEG, @@ -118,10 +119,11 @@ class VISP_EXPORT vpImageIo public: //! Image IO backend for only jpeg and png formats image loading and saving - enum vpImageIoBackendType { + enum vpImageIoBackendType + { IO_DEFAULT_BACKEND, //!< Default backend - IO_SYSTEM_LIB_BACKEND, //!< Use system libraries like libpng or libjpeg - IO_OPENCV_BACKEND, //!< Use OpenCV + IO_SYSTEM_LIB_BACKEND, //!< Use system libraries like libpng or libjpeg-turbo + IO_OPENCV_BACKEND, //!< Use OpenCV imgcodecs module IO_SIMDLIB_BACKEND, //!< Use embedded simd library IO_STB_IMAGE_BACKEND //!< Use embedded stb_image library }; diff --git a/modules/io/src/image/private/vpImageIoBackend.h b/modules/io/src/image/private/vpImageIoBackend.h index d41c61d95e..bff9f1dbf5 100644 --- a/modules/io/src/image/private/vpImageIoBackend.h +++ b/modules/io/src/image/private/vpImageIoBackend.h @@ -106,7 +106,8 @@ void writeEXRTiny(const vpImage &I, const std::string &filename); void writeEXRTiny(const vpImage &I, const std::string &filename); #endif -// stb lib +#if defined(VISP_HAVE_STBIMAGE) +// stb_image lib void readStb(vpImage &I, const std::string &filename); void readStb(vpImage &I, const std::string &filename); @@ -115,5 +116,6 @@ void writeJPEGStb(const vpImage &I, const std::string &filename, int qua void writePNGStb(const vpImage &I, const std::string &filename); void writePNGStb(const vpImage &I, const std::string &filename); +#endif #endif diff --git a/modules/io/src/image/private/vpImageIoStb.cpp b/modules/io/src/image/private/vpImageIoStb.cpp index 40d52ba5e8..253b3a986a 100644 --- a/modules/io/src/image/private/vpImageIoStb.cpp +++ b/modules/io/src/image/private/vpImageIoStb.cpp @@ -36,6 +36,10 @@ \brief stb backend for JPEG and PNG image I/O operations. 
*/ +#include + +#if defined(VISP_HAVE_STBIMAGE) + #include "vpImageIoBackend.h" #if defined __SSE2__ || defined _M_X64 || (defined _M_IX86_FP && _M_IX86_FP >= 2) @@ -111,3 +115,5 @@ void writePNGStb(const vpImage &I, const std::string &filename) throw(vpImageException(vpImageException::ioError, "PNG write error: %s", filename.c_str())); } } + +#endif diff --git a/modules/io/src/image/private/vpImageIoTinyEXR.cpp b/modules/io/src/image/private/vpImageIoTinyEXR.cpp index 99bacc8c85..add5590864 100644 --- a/modules/io/src/image/private/vpImageIoTinyEXR.cpp +++ b/modules/io/src/image/private/vpImageIoTinyEXR.cpp @@ -39,7 +39,8 @@ #include -#if defined(VISP_HAVE_TINYEXR) +#if defined(VISP_HAVE_STBIMAGE) && defined(VISP_HAVE_TINYEXR) + #include "vpImageIoBackend.h" #define TINYEXR_USE_MINIZ 0 diff --git a/modules/io/src/image/vpImageIo.cpp b/modules/io/src/image/vpImageIo.cpp index 1ca9ddab68..b0cec21dd4 100644 --- a/modules/io/src/image/vpImageIo.cpp +++ b/modules/io/src/image/vpImageIo.cpp @@ -396,15 +396,13 @@ void vpImageIo::readJPEG(vpImage &I, const std::string &filename, { if (backend == IO_SYSTEM_LIB_BACKEND) { #if !defined(VISP_HAVE_JPEG) - std::string message = - "Libjpeg backend is not available to read file \"" + filename + "\": switch to stb_image backend"; + // Libjpeg backend is not available to read file \"" + filename + "\": switch to stb_image backend backend = IO_STB_IMAGE_BACKEND; #endif } else if (backend == IO_OPENCV_BACKEND) { #if !(defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGCODECS)) - std::string message = - "OpenCV backend is not available to read file \"" + filename + "\": switch to stb_image backend"; + // OpenCV backend is not available to read file \"" + filename + "\": switch to stb_image backend backend = IO_STB_IMAGE_BACKEND; #endif } @@ -413,29 +411,58 @@ void vpImageIo::readJPEG(vpImage &I, const std::string &filename, backend = IO_OPENCV_BACKEND; #elif defined(VISP_HAVE_JPEG) backend = IO_SYSTEM_LIB_BACKEND; -#else +#elif defined(VISP_HAVE_SIMDLIB) + backend = IO_SIMDLIB_BACKEND; +#elif defined(VISP_HAVE_STBIMAGE) backend = IO_STB_IMAGE_BACKEND; +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": no backend available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } if (backend == IO_SYSTEM_LIB_BACKEND) { #if defined(VISP_HAVE_JPEG) readJPEGLibjpeg(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": jpeg library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } else if (backend == IO_OPENCV_BACKEND) { #if ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_IMGCODECS)) || ((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC)) readOpenCV(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": OpenCV library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } else if (backend == IO_STB_IMAGE_BACKEND) { +#if defined(VISP_HAVE_STBIMAGE) readStb(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": stb_image backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); +#endif } -#if defined(VISP_HAVE_SIMDLIB) else if (backend == IO_SIMDLIB_BACKEND) { +#if defined(VISP_HAVE_SIMDLIB) readSimdlib(I, filename); - } +#else + (void)I; 
+ (void)filename; + std::string message = "Cannot read file \"" + filename + "\": Simd library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif + } } /*! @@ -469,29 +496,58 @@ void vpImageIo::readJPEG(vpImage &I, const std::string &filename, int ba backend = IO_OPENCV_BACKEND; #elif defined(VISP_HAVE_JPEG) backend = IO_SYSTEM_LIB_BACKEND; -#else +#elif defined(VISP_HAVE_SIMDLIB) + backend = IO_SIMDLIB_BACKEND; +#elif defined(VISP_HAVE_STBIMAGE) backend = IO_STB_IMAGE_BACKEND; +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": no backend available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } if (backend == IO_SYSTEM_LIB_BACKEND) { #if defined(VISP_HAVE_JPEG) readJPEGLibjpeg(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": jpeg library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } else if (backend == IO_OPENCV_BACKEND) { #if ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_IMGCODECS)) || ((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC)) readOpenCV(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": OpenCV library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } else if (backend == IO_STB_IMAGE_BACKEND) { +#if defined(VISP_HAVE_STBIMAGE) readStb(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": stb_image backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); +#endif } -#if defined(VISP_HAVE_SIMDLIB) else if (backend == IO_SIMDLIB_BACKEND) { +#if defined(VISP_HAVE_SIMDLIB) readSimdlib(I, filename); - } +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": Simd library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif + } } /*! 
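With the guards introduced above, explicitly requesting a backend that was not compiled in now throws a vpImageException instead of silently returning. A minimal usage sketch (the image file name is only illustrative):

  #include <iostream>
  #include <visp3/core/vpImage.h>
  #include <visp3/core/vpImageException.h>
  #include <visp3/io/vpImageIo.h>

  int main()
  {
    vpImage<unsigned char> I;
    try {
      // Explicitly request the embedded stb_image backend; when ViSP is built
      // with WITH_STBIMAGE=OFF this now throws instead of doing nothing
      vpImageIo::readJPEG(I, "image.jpg", vpImageIo::IO_STB_IMAGE_BACKEND);
    }
    catch (const vpImageException &e) {
      std::cout << "Caught an exception: " << e << std::endl;
    }
    return 0;
  }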
@@ -525,29 +581,58 @@ void vpImageIo::readPNG(vpImage &I, const std::string &filename, backend = IO_SYSTEM_LIB_BACKEND; #elif defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGCODECS) backend = IO_OPENCV_BACKEND; -#else +#elif defined(VISP_HAVE_SIMDLIB) + backend = IO_SIMDLIB_BACKEND; +#elif defined(VISP_HAVE_STBIMAGE) backend = IO_STB_IMAGE_BACKEND; +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": no backend available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } if (backend == IO_SYSTEM_LIB_BACKEND) { #if defined(VISP_HAVE_PNG) readPNGLibpng(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": png library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } else if (backend == IO_OPENCV_BACKEND) { #if ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_IMGCODECS)) || ((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC)) readOpenCV(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": OpenCV library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } else if (backend == IO_STB_IMAGE_BACKEND) { +#if defined(VISP_HAVE_STBIMAGE) readStb(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": stb_image backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); +#endif } -#if defined(VISP_HAVE_SIMDLIB) else if (backend == IO_SIMDLIB_BACKEND) { +#if defined(VISP_HAVE_SIMDLIB) readSimdlib(I, filename); - } +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": Simd library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif + } } /*! 
@@ -579,29 +664,58 @@ void vpImageIo::readPNG(vpImage &I, const std::string &filename, int bac else if (backend == IO_DEFAULT_BACKEND) { #if ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_IMGCODECS)) || ((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC)) backend = IO_OPENCV_BACKEND; -#else +#elif defined(VISP_HAVE_SIMDLIB) + backend = IO_SIMDLIB_BACKEND; +#elif defined(VISP_HAVE_STBIMAGE) backend = IO_STB_IMAGE_BACKEND; +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": no backend available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } if (backend == IO_SYSTEM_LIB_BACKEND) { #if defined(VISP_HAVE_PNG) readPNGLibpng(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": png library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } else if (backend == IO_OPENCV_BACKEND) { #if ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_IMGCODECS)) || ((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC)) readOpenCV(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": OpenCV library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } else if (backend == IO_STB_IMAGE_BACKEND) { +#if defined(VISP_HAVE_STBIMAGE) readStb(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": stb_image backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); +#endif } -#if defined(VISP_HAVE_SIMDLIB) else if (backend == IO_SIMDLIB_BACKEND) { +#if defined(VISP_HAVE_SIMDLIB) readSimdlib(I, filename); - } +#else + (void)I; + (void)filename; + std::string message = "Cannot read file \"" + filename + "\": Simd library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif + } } /*! 
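For IO_DEFAULT_BACKEND the fallback order established above is OpenCV imgcodecs, then the system libjpeg/libpng, then the embedded Simd library, then stb_image, and the call throws only when none of them is available. A minimal sketch relying on that default (file name illustrative):

  #include <visp3/core/vpImage.h>
  #include <visp3/core/vpRGBa.h>
  #include <visp3/io/vpImageIo.h>

  int main()
  {
    vpImage<vpRGBa> I_color;
    // With IO_DEFAULT_BACKEND (the default argument) the best available
    // backend is selected at build time through the #elif chain above
    vpImageIo::readPNG(I_color, "image.png");
    return 0;
  }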
@@ -744,28 +858,59 @@ void vpImageIo::writeJPEG(const vpImage &I, const std::string &fi backend = IO_OPENCV_BACKEND; #elif defined(VISP_HAVE_SIMDLIB) backend = IO_SIMDLIB_BACKEND; -#else +#elif defined(VISP_HAVE_STBIMAGE) backend = IO_STB_IMAGE_BACKEND; +#else + (void)I; + (void)filename; + std::string message = "Cannot save file \"" + filename + "\": no backend available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } if (backend == IO_SYSTEM_LIB_BACKEND) { #if defined(VISP_HAVE_JPEG) writeJPEGLibjpeg(I, filename, quality); +#else + (void)I; + (void)filename; + (void)quality; + std::string message = "Cannot save file \"" + filename + "\": jpeg backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } else if (backend == IO_OPENCV_BACKEND) { #if ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_IMGCODECS)) || ((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC)) writeOpenCV(I, filename, quality); +#else + (void)I; + (void)filename; + (void)quality; + std::string message = "Cannot save file \"" + filename + "\": OpenCV backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } -#if defined(VISP_HAVE_SIMDLIB) else if (backend == IO_SIMDLIB_BACKEND) { +#if defined(VISP_HAVE_SIMDLIB) writeJPEGSimdlib(I, filename, quality); - } +#else + (void)I; + (void)filename; + (void)quality; + std::string message = "Cannot save file \"" + filename + "\": Simd library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif + } else if (backend == IO_STB_IMAGE_BACKEND) { +#if defined(VISP_HAVE_STBIMAGE) writeJPEGStb(I, filename, quality); +#else + (void)I; + (void)filename; + (void)quality; + std::string message = "Cannot save file \"" + filename + "\": stb_image backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); +#endif } } @@ -811,28 +956,59 @@ void vpImageIo::writeJPEG(const vpImage &I, const std::string &filename, backend = IO_OPENCV_BACKEND; #elif defined(VISP_HAVE_SIMDLIB) backend = IO_SIMDLIB_BACKEND; -#else +#elif defined(VISP_HAVE_STBIMAGE) backend = IO_STB_IMAGE_BACKEND; +#else + (void)I; + (void)filename; + std::string message = "Cannot save file \"" + filename + "\": no backend available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } if (backend == IO_SYSTEM_LIB_BACKEND) { #if defined(VISP_HAVE_JPEG) writeJPEGLibjpeg(I, filename, quality); +#else + (void)I; + (void)filename; + (void)quality; + std::string message = "Cannot save file \"" + filename + "\": jpeg library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } else if (backend == IO_OPENCV_BACKEND) { #if ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_IMGCODECS)) || ((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC)) writeOpenCV(I, filename, quality); +#else + (void)I; + (void)filename; + (void)quality; + std::string message = "Cannot save file \"" + filename + "\": OpenCV library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } -#if defined(VISP_HAVE_SIMDLIB) else if (backend == IO_SIMDLIB_BACKEND) { +#if defined(VISP_HAVE_SIMDLIB) writeJPEGSimdlib(I, filename, quality); - } +#else + (void)I; + (void)filename; + (void)quality; + std::string message = "Cannot save file \"" + filename + "\": Simd library 
backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif + } else if (backend == IO_STB_IMAGE_BACKEND) { +#if defined(VISP_HAVE_STBIMAGE) writeJPEGStb(I, filename, quality); +#else + (void)I; + (void)filename; + (void)quality; + std::string message = "Cannot save file \"" + filename + "\": stb_image backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); +#endif } } @@ -875,27 +1051,56 @@ void vpImageIo::writePNG(const vpImage &I, const std::string &fil backend = IO_OPENCV_BACKEND; #elif defined(VISP_HAVE_SIMDLIB) backend = IO_SIMDLIB_BACKEND; -#else +#elif defined(VISP_HAVE_STBIMAGE) backend = IO_STB_IMAGE_BACKEND; +#else + (void)I; + (void)filename; + std::string message = "Cannot save file \"" + filename + "\": no backend available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } if (backend == IO_OPENCV_BACKEND) { #if ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_IMGCODECS)) || ((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC)) writeOpenCV(I, filename, 90); +#else + (void)I; + (void)filename; + (void)quality; + std::string message = "Cannot save file \"" + filename + "\": OpenCV library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } -#if defined(VISP_HAVE_SIMDLIB) else if (backend == IO_SIMDLIB_BACKEND) { +#if defined(VISP_HAVE_SIMDLIB) writePNGSimdlib(I, filename); - } +#else + (void)I; + (void)filename; + std::string message = "Cannot save file \"" + filename + "\": Simd library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif + } else if (backend == IO_STB_IMAGE_BACKEND) { +#if defined(VISP_HAVE_STBIMAGE) writePNGStb(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot save file \"" + filename + "\": stb_image backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); +#endif } else if (backend == IO_SYSTEM_LIB_BACKEND) { #if defined(VISP_HAVE_PNG) writePNGLibpng(I, filename); +#else + (void)I; + (void)filename; + (void)quality; + std::string message = "Cannot save file \"" + filename + "\": png library backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } } @@ -939,27 +1144,54 @@ void vpImageIo::writePNG(const vpImage &I, const std::string &filename, backend = IO_OPENCV_BACKEND; #elif defined(VISP_HAVE_SIMDLIB) backend = IO_SIMDLIB_BACKEND; -#else +#elif defined(VISP_HAVE_STBIMAGE) backend = IO_STB_IMAGE_BACKEND; +#else + (void)I; + (void)filename; + std::string message = "Cannot save file \"" + filename + "\": no backend available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } if (backend == IO_OPENCV_BACKEND) { #if ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_IMGCODECS)) || ((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC)) writeOpenCV(I, filename, 90); +#else + (void)I; + (void)filename; + std::string message = "Cannot save file \"" + filename + "\": OpenCV backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } -#if defined(VISP_HAVE_SIMDLIB) else if (backend == IO_SIMDLIB_BACKEND) { +#if defined(VISP_HAVE_SIMDLIB) writePNGSimdlib(I, filename); - } +#else + (void)I; + (void)filename; + std::string message = "Cannot save file \"" + filename + "\": Simd library backend is 
not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif + } else if (backend == IO_STB_IMAGE_BACKEND) { +#if defined(VISP_HAVE_STBIMAGE) writePNGStb(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot save file \"" + filename + "\": stb_image backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); +#endif } else if (backend == IO_SYSTEM_LIB_BACKEND) { #if defined(VISP_HAVE_PNG) writePNGLibpng(I, filename); +#else + (void)I; + (void)filename; + std::string message = "Cannot save file \"" + filename + "\": libpng backend is not available"; + throw(vpImageException(vpImageException::ioError, message)); #endif } } diff --git a/modules/io/test/image-with-dataset/testImageLoadSave.cpp b/modules/io/test/image-with-dataset/testImageLoadSave.cpp index ed325b4ff0..50e4def6ee 100644 --- a/modules/io/test/image-with-dataset/testImageLoadSave.cpp +++ b/modules/io/test/image-with-dataset/testImageLoadSave.cpp @@ -61,7 +61,9 @@ static const std::vector backends #if defined VISP_HAVE_SIMDLIB vpImageIo::IO_SIMDLIB_BACKEND, #endif +#if defined VISP_HAVE_STBIMAGE vpImageIo::IO_STB_IMAGE_BACKEND +#endif }; static const std::vector backendNamesJpeg { diff --git a/modules/robot/src/real-robot/afma6/vpAfma6.cpp b/modules/robot/src/real-robot/afma6/vpAfma6.cpp index 600ee8e7cc..1462909d95 100644 --- a/modules/robot/src/real-robot/afma6/vpAfma6.cpp +++ b/modules/robot/src/real-robot/afma6/vpAfma6.cpp @@ -58,45 +58,45 @@ /* --- STATIC ------------------------------------------------------------ */ /* ---------------------------------------------------------------------- */ -static const char *opt_Afma6[] = {"JOINT_MAX", "JOINT_MIN", "LONG_56", "COUPL_56", - "CAMERA", "eMc_ROT_XYZ", "eMc_TRANS_XYZ", nullptr}; +static const char *opt_Afma6[] = { "JOINT_MAX", "JOINT_MIN", "LONG_56", "COUPL_56", + "CAMERA", "eMc_ROT_XYZ", "eMc_TRANS_XYZ", nullptr }; #ifdef VISP_HAVE_AFMA6_DATA const std::string vpAfma6::CONST_AFMA6_FILENAME = - std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_Afma6.cnf"); +std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_Afma6.cnf"); const std::string vpAfma6::CONST_EMC_CCMOP_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_ccmop_without_distortion_Afma6.cnf"); +std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_ccmop_without_distortion_Afma6.cnf"); const std::string vpAfma6::CONST_EMC_CCMOP_WITH_DISTORTION_FILENAME = - std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_ccmop_with_distortion_Afma6.cnf"); +std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_ccmop_with_distortion_Afma6.cnf"); const std::string vpAfma6::CONST_EMC_GRIPPER_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_gripper_without_distortion_Afma6.cnf"); +std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_gripper_without_distortion_Afma6.cnf"); const std::string vpAfma6::CONST_EMC_GRIPPER_WITH_DISTORTION_FILENAME = - std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_gripper_with_distortion_Afma6.cnf"); +std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_gripper_with_distortion_Afma6.cnf"); const std::string vpAfma6::CONST_EMC_VACUUM_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_vacuum_without_distortion_Afma6.cnf"); +std::string(VISP_AFMA6_DATA_PATH) + 
std::string("/include/const_eMc_vacuum_without_distortion_Afma6.cnf"); const std::string vpAfma6::CONST_EMC_VACUUM_WITH_DISTORTION_FILENAME = - std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_vacuum_with_distortion_Afma6.cnf"); +std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_vacuum_with_distortion_Afma6.cnf"); const std::string vpAfma6::CONST_EMC_GENERIC_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_generic_without_distortion_Afma6.cnf"); +std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_generic_without_distortion_Afma6.cnf"); const std::string vpAfma6::CONST_EMC_GENERIC_WITH_DISTORTION_FILENAME = - std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_generic_with_distortion_Afma6.cnf"); +std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_generic_with_distortion_Afma6.cnf"); const std::string vpAfma6::CONST_EMC_INTEL_D435_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_Intel_D435_without_distortion_Afma6.cnf"); +std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_Intel_D435_without_distortion_Afma6.cnf"); const std::string vpAfma6::CONST_EMC_INTEL_D435_WITH_DISTORTION_FILENAME = - std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_Intel_D435_with_distortion_Afma6.cnf"); +std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_eMc_Intel_D435_with_distortion_Afma6.cnf"); const std::string vpAfma6::CONST_CAMERA_AFMA6_FILENAME = - std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_camera_Afma6.xml"); +std::string(VISP_AFMA6_DATA_PATH) + std::string("/include/const_camera_Afma6.xml"); #endif // VISP_HAVE_AFMA6_DATA @@ -117,7 +117,7 @@ const unsigned int vpAfma6::njoint = 6; */ vpAfma6::vpAfma6() : _coupl_56(0), _long_56(0), _etc(), _erc(), _eMc(), tool_current(vpAfma6::defaultTool), - projModel(vpCameraParameters::perspectiveProjWithoutDistortion) + projModel(vpCameraParameters::perspectiveProjWithoutDistortion) { // Set the default parameters in case of the config files are not available. @@ -641,7 +641,8 @@ int vpAfma6::getInverseKinematics(const vpHomogeneousMatrix &fMc, vpColVector &q q_[1][5] += vpMath::rad(10); q_[1][3] -= vpMath::rad(10); } - } else if (fMe[2][2] <= -.99999) { + } + else if (fMe[2][2] <= -.99999) { vpTRACE("singularity\n"); q_[0][4] = q_[1][4] = -M_PI / 2; t = atan2(fMe[1][1], fMe[1][0]); @@ -657,7 +658,8 @@ int vpAfma6::getInverseKinematics(const vpHomogeneousMatrix &fMc, vpColVector &q q_[1][5] += vpMath::rad(10); q_[1][3] += vpMath::rad(10); } - } else { + } + else { q_[0][3] = atan2(-fMe[0][2], fMe[1][2]); if (q_[0][3] >= 0.0) q_[1][3] = q_[0][3] - M_PI; @@ -694,10 +696,10 @@ int vpAfma6::getInverseKinematics(const vpHomogeneousMatrix &fMc, vpColVector &q if (verbose) { if (i < 3) std::cout << "Joint " << i << " not in limits: " << this->_joint_min[i] << " < " << q_[j][i] << " < " - << this->_joint_max[i] << std::endl; + << this->_joint_max[i] << std::endl; else std::cout << "Joint " << i << " not in limits: " << vpMath::deg(this->_joint_min[i]) << " < " - << vpMath::deg(q_[j][i]) << " < " << vpMath::deg(this->_joint_max[i]) << std::endl; + << vpMath::deg(q_[j][i]) << " < " << vpMath::deg(this->_joint_max[i]) << std::endl; } ok[j] = 0; } @@ -708,17 +710,20 @@ int vpAfma6::getInverseKinematics(const vpHomogeneousMatrix &fMc, vpColVector &q std::cout << "No solution..." 
<< std::endl; nbsol = 0; return nbsol; - } else if (ok[1] == 1) { + } + else if (ok[1] == 1) { for (unsigned int i = 0; i < 6; i++) cord[i] = q_[1][i]; nbsol = 1; } - } else { + } + else { if (ok[1] == 0) { for (unsigned int i = 0; i < 6; i++) cord[i] = q_[0][i]; nbsol = 1; - } else { + } + else { nbsol = 2; // vpTRACE("2 solutions\n"); for (int j = 0; j < 2; j++) { @@ -733,7 +738,8 @@ int vpAfma6::getInverseKinematics(const vpHomogeneousMatrix &fMc, vpColVector &q else for (unsigned int i = 0; i < 6; i++) cord[i] = q_[1][i]; - } else { + } + else { if (d[0] <= d[1]) for (unsigned int i = 0; i < 6; i++) cord[i] = q_[1][i]; @@ -1125,12 +1131,12 @@ void vpAfma6::parseConfigFile(const std::string &filename) switch (code) { case 0: ss >> this->_joint_max[0] >> this->_joint_max[1] >> this->_joint_max[2] >> this->_joint_max[3] >> - this->_joint_max[4] >> this->_joint_max[5]; + this->_joint_max[4] >> this->_joint_max[5]; break; case 1: ss >> this->_joint_min[0] >> this->_joint_min[1] >> this->_joint_min[2] >> this->_joint_min[3] >> - this->_joint_min[4] >> this->_joint_min[5]; + this->_joint_min[4] >> this->_joint_min[5]; break; case 2: @@ -1255,12 +1261,12 @@ parameters are not found. void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &image_width, const unsigned int &image_height) const { -#if defined(VISP_HAVE_AFMA6_DATA) +#if defined(VISP_HAVE_AFMA6_DATA) && defined(VISP_HAVE_PUGIXML) vpXmlParserCamera parser; switch (getToolType()) { case vpAfma6::TOOL_CCMOP: { std::cout << "Get camera parameters for camera \"" << vpAfma6::CONST_CCMOP_CAMERA_NAME << "\"" << std::endl - << "from the XML file: \"" << vpAfma6::CONST_CAMERA_AFMA6_FILENAME << "\"" << std::endl; + << "from the XML file: \"" << vpAfma6::CONST_CAMERA_AFMA6_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpAfma6::CONST_CAMERA_AFMA6_FILENAME, vpAfma6::CONST_CCMOP_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -1269,7 +1275,7 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i } case vpAfma6::TOOL_GRIPPER: { std::cout << "Get camera parameters for camera \"" << vpAfma6::CONST_GRIPPER_CAMERA_NAME << "\"" << std::endl - << "from the XML file: \"" << vpAfma6::CONST_CAMERA_AFMA6_FILENAME << "\"" << std::endl; + << "from the XML file: \"" << vpAfma6::CONST_CAMERA_AFMA6_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpAfma6::CONST_CAMERA_AFMA6_FILENAME, vpAfma6::CONST_GRIPPER_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -1278,7 +1284,7 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i } case vpAfma6::TOOL_VACUUM: { std::cout << "Get camera parameters for camera \"" << vpAfma6::CONST_VACUUM_CAMERA_NAME << "\"" << std::endl - << "from the XML file: \"" << vpAfma6::CONST_CAMERA_AFMA6_FILENAME << "\"" << std::endl; + << "from the XML file: \"" << vpAfma6::CONST_CAMERA_AFMA6_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpAfma6::CONST_CAMERA_AFMA6_FILENAME, vpAfma6::CONST_VACUUM_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -1287,7 +1293,7 @@ void 
vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i } case vpAfma6::TOOL_INTEL_D435_CAMERA: { std::cout << "Get camera parameters for camera \"" << vpAfma6::CONST_INTEL_D435_CAMERA_NAME << "\"" << std::endl - << "from the XML file: \"" << vpAfma6::CONST_CAMERA_AFMA6_FILENAME << "\"" << std::endl; + << "from the XML file: \"" << vpAfma6::CONST_CAMERA_AFMA6_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpAfma6::CONST_CAMERA_AFMA6_FILENAME, vpAfma6::CONST_INTEL_D435_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -1296,7 +1302,7 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i } case vpAfma6::TOOL_GENERIC_CAMERA: { std::cout << "Get camera parameters for camera \"" << vpAfma6::CONST_GENERIC_CAMERA_NAME << "\"" << std::endl - << "from the XML file: \"" << vpAfma6::CONST_CAMERA_AFMA6_FILENAME << "\"" << std::endl; + << "from the XML file: \"" << vpAfma6::CONST_CAMERA_AFMA6_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpAfma6::CONST_CAMERA_AFMA6_FILENAME, vpAfma6::CONST_GENERIC_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -1323,7 +1329,7 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i // Set default intrinsic camera parameters for 640x480 images if (image_width == 640 && image_height == 480) { std::cout << "Get default camera parameters for camera \"" << vpAfma6::CONST_CCMOP_CAMERA_NAME << "\"" - << std::endl; + << std::endl; switch (this->projModel) { case vpCameraParameters::perspectiveProjWithoutDistortion: cam.initPersProjWithoutDistortion(1108.0, 1110.0, 314.5, 243.2); @@ -1336,7 +1342,8 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i "Feature getCameraParameters is not implemented for Kannala-Brandt projection model yet."); break; } - } else { + } + else { vpTRACE("Cannot get default intrinsic camera parameters for this image " "resolution"); throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -1347,7 +1354,7 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i // Set default intrinsic camera parameters for 640x480 images if (image_width == 640 && image_height == 480) { std::cout << "Get default camera parameters for camera \"" << vpAfma6::CONST_GRIPPER_CAMERA_NAME << "\"" - << std::endl; + << std::endl; switch (this->projModel) { case vpCameraParameters::perspectiveProjWithoutDistortion: cam.initPersProjWithoutDistortion(850.9, 853.0, 311.1, 243.6); @@ -1360,7 +1367,8 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i "Feature getCameraParameters is not implemented for Kannala-Brandt projection model yet."); break; } - } else { + } + else { vpTRACE("Cannot get default intrinsic camera parameters for this image " "resolution"); throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -1371,7 +1379,7 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i // Set default intrinsic camera parameters for 640x480 images if (image_width == 640 && image_height == 480) { std::cout << "Get default camera parameters for camera \"" << 
vpAfma6::CONST_VACUUM_CAMERA_NAME << "\"" - << std::endl; + << std::endl; switch (this->projModel) { case vpCameraParameters::perspectiveProjWithoutDistortion: cam.initPersProjWithoutDistortion(853.5, 856.0, 307.8, 236.8); @@ -1384,7 +1392,8 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i "Feature getCameraParameters is not implemented for Kannala-Brandt projection model yet."); break; } - } else { + } + else { vpTRACE("Cannot get default intrinsic camera parameters for this image " "resolution"); throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -1395,7 +1404,7 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i // Set default intrinsic camera parameters for 640x480 images if (image_width == 640 && image_height == 480) { std::cout << "Get default camera parameters for camera \"" << vpAfma6::CONST_INTEL_D435_CAMERA_NAME << "\"" - << std::endl; + << std::endl; switch (this->projModel) { case vpCameraParameters::perspectiveProjWithoutDistortion: cam.initPersProjWithoutDistortion(605.4, 605.6, 328.6, 241.0); @@ -1408,7 +1417,8 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i "Feature getCameraParameters is not implemented for Kannala-Brandt projection model yet."); break; } - } else { + } + else { vpTRACE("Cannot get default intrinsic camera parameters for this image " "resolution"); throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -1419,7 +1429,7 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i // Set default intrinsic camera parameters for 640x480 images if (image_width == 640 && image_height == 480) { std::cout << "Get default camera parameters for camera \"" << vpAfma6::CONST_GENERIC_CAMERA_NAME << "\"" - << std::endl; + << std::endl; switch (this->projModel) { case vpCameraParameters::perspectiveProjWithoutDistortion: cam.initPersProjWithoutDistortion(853.5, 856.0, 307.8, 236.8); @@ -1432,7 +1442,8 @@ void vpAfma6::getCameraParameters(vpCameraParameters &cam, const unsigned int &i "Feature getCameraParameters is not implemented for Kannala-Brandt projection model yet."); break; } - } else { + } + else { vpTRACE("Cannot get default intrinsic camera parameters for this image " "resolution"); throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -1557,25 +1568,25 @@ VISP_EXPORT std::ostream &operator<<(std::ostream &os, const vpAfma6 &afma6) vpRxyzVector rxyz(eRc); os << "Joint Max:" << std::endl - << "\t" << afma6._joint_max[0] << "\t" << afma6._joint_max[1] << "\t" << afma6._joint_max[2] << "\t" - << afma6._joint_max[3] << "\t" << afma6._joint_max[4] << "\t" << afma6._joint_max[5] << "\t" << std::endl + << "\t" << afma6._joint_max[0] << "\t" << afma6._joint_max[1] << "\t" << afma6._joint_max[2] << "\t" + << afma6._joint_max[3] << "\t" << afma6._joint_max[4] << "\t" << afma6._joint_max[5] << "\t" << std::endl - << "Joint Min: " << std::endl - << "\t" << afma6._joint_min[0] << "\t" << afma6._joint_min[1] << "\t" << afma6._joint_min[2] << "\t" - << afma6._joint_min[3] << "\t" << afma6._joint_min[4] << "\t" << afma6._joint_min[5] << "\t" << std::endl + << "Joint Min: " << std::endl + << "\t" << afma6._joint_min[0] << "\t" << afma6._joint_min[1] << "\t" << afma6._joint_min[2] << "\t" + << afma6._joint_min[3] << "\t" << afma6._joint_min[4] << "\t" << afma6._joint_min[5] << 
"\t" << std::endl - << "Long 5-6: " << std::endl - << "\t" << afma6._long_56 << "\t" << std::endl + << "Long 5-6: " << std::endl + << "\t" << afma6._long_56 << "\t" << std::endl - << "Coupling 5-6:" << std::endl - << "\t" << afma6._coupl_56 << "\t" << std::endl + << "Coupling 5-6:" << std::endl + << "\t" << afma6._coupl_56 << "\t" << std::endl - << "eMc: " << std::endl - << "\tTranslation (m): " << afma6._eMc[0][3] << " " << afma6._eMc[1][3] << " " << afma6._eMc[2][3] << "\t" - << std::endl - << "\tRotation Rxyz (rad) : " << rxyz[0] << " " << rxyz[1] << " " << rxyz[2] << "\t" << std::endl - << "\tRotation Rxyz (deg) : " << vpMath::deg(rxyz[0]) << " " << vpMath::deg(rxyz[1]) << " " << vpMath::deg(rxyz[2]) - << "\t" << std::endl; + << "eMc: " << std::endl + << "\tTranslation (m): " << afma6._eMc[0][3] << " " << afma6._eMc[1][3] << " " << afma6._eMc[2][3] << "\t" + << std::endl + << "\tRotation Rxyz (rad) : " << rxyz[0] << " " << rxyz[1] << " " << rxyz[2] << "\t" << std::endl + << "\tRotation Rxyz (deg) : " << vpMath::deg(rxyz[0]) << " " << vpMath::deg(rxyz[1]) << " " << vpMath::deg(rxyz[2]) + << "\t" << std::endl; return os; } diff --git a/modules/robot/src/real-robot/viper/vpViper650.cpp b/modules/robot/src/real-robot/viper/vpViper650.cpp index 2531b8d986..d19b519ead 100644 --- a/modules/robot/src/real-robot/viper/vpViper650.cpp +++ b/modules/robot/src/real-robot/viper/vpViper650.cpp @@ -46,40 +46,40 @@ #include #include -static const char *opt_viper650[] = {"CAMERA", "eMc_ROT_XYZ", "eMc_TRANS_XYZ", nullptr}; +static const char *opt_viper650[] = { "CAMERA", "eMc_ROT_XYZ", "eMc_TRANS_XYZ", nullptr }; #ifdef VISP_HAVE_VIPER650_DATA const std::string vpViper650::CONST_EMC_MARLIN_F033C_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_VIPER650_DATA_PATH) + - std::string("/include/const_eMc_MarlinF033C_without_distortion_Viper650.cnf"); +std::string(VISP_VIPER650_DATA_PATH) + +std::string("/include/const_eMc_MarlinF033C_without_distortion_Viper650.cnf"); const std::string vpViper650::CONST_EMC_MARLIN_F033C_WITH_DISTORTION_FILENAME = - std::string(VISP_VIPER650_DATA_PATH) + std::string("/include/const_eMc_MarlinF033C_with_distortion_Viper650.cnf"); +std::string(VISP_VIPER650_DATA_PATH) + std::string("/include/const_eMc_MarlinF033C_with_distortion_Viper650.cnf"); const std::string vpViper650::CONST_EMC_PTGREY_FLEA2_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_VIPER650_DATA_PATH) + - std::string("/include/const_eMc_PTGreyFlea2_without_distortion_Viper650.cnf"); +std::string(VISP_VIPER650_DATA_PATH) + +std::string("/include/const_eMc_PTGreyFlea2_without_distortion_Viper650.cnf"); const std::string vpViper650::CONST_EMC_PTGREY_FLEA2_WITH_DISTORTION_FILENAME = - std::string(VISP_VIPER650_DATA_PATH) + std::string("/include/const_eMc_PTGreyFlea2_with_distortion_Viper650.cnf"); +std::string(VISP_VIPER650_DATA_PATH) + std::string("/include/const_eMc_PTGreyFlea2_with_distortion_Viper650.cnf"); const std::string vpViper650::CONST_EMC_SCHUNK_GRIPPER_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_VIPER650_DATA_PATH) + std::string("/include/" - "const_eMc_schunk_gripper_without_distortion_Viper650." - "cnf"); +std::string(VISP_VIPER650_DATA_PATH) + std::string("/include/" + "const_eMc_schunk_gripper_without_distortion_Viper650." 
+ "cnf"); const std::string vpViper650::CONST_EMC_SCHUNK_GRIPPER_WITH_DISTORTION_FILENAME = - std::string(VISP_VIPER650_DATA_PATH) + - std::string("/include/const_eMc_schunk_gripper_with_distortion_Viper650.cnf"); +std::string(VISP_VIPER650_DATA_PATH) + +std::string("/include/const_eMc_schunk_gripper_with_distortion_Viper650.cnf"); const std::string vpViper650::CONST_EMC_GENERIC_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_VIPER650_DATA_PATH) + std::string("/include/const_eMc_generic_without_distortion_Viper650.cnf"); +std::string(VISP_VIPER650_DATA_PATH) + std::string("/include/const_eMc_generic_without_distortion_Viper650.cnf"); const std::string vpViper650::CONST_EMC_GENERIC_WITH_DISTORTION_FILENAME = - std::string(VISP_VIPER650_DATA_PATH) + std::string("/include/const_eMc_generic_with_distortion_Viper650.cnf"); +std::string(VISP_VIPER650_DATA_PATH) + std::string("/include/const_eMc_generic_with_distortion_Viper650.cnf"); const std::string vpViper650::CONST_CAMERA_FILENAME = - std::string(VISP_VIPER650_DATA_PATH) + std::string("/include/const_camera_Viper650.xml"); +std::string(VISP_VIPER650_DATA_PATH) + std::string("/include/const_camera_Viper650.xml"); #endif // VISP_HAVE_VIPER650_DATA @@ -473,7 +473,8 @@ void vpViper650::parseConfigFile(const std::string &filename) // Compute the eMc matrix from the translations and rotations if (get_etc && get_erc) { this->set_eMc(etc_, erc_); - } else { + } + else { throw vpRobotException(vpRobotException::readingParametersError, "Could not read translation and rotation " "parameters from config file %s", @@ -553,13 +554,13 @@ parameters are not found. void vpViper650::getCameraParameters(vpCameraParameters &cam, const unsigned int &image_width, const unsigned int &image_height) const { -#if defined(VISP_HAVE_VIPER650_DATA) +#if defined(VISP_HAVE_VIPER650_DATA) && defined(VISP_HAVE_PUGIXML) vpXmlParserCamera parser; switch (getToolType()) { case vpViper650::TOOL_MARLIN_F033C_CAMERA: { std::cout << "Get camera parameters for camera \"" << vpViper650::CONST_MARLIN_F033C_CAMERA_NAME << "\"" - << std::endl - << "from the XML file: \"" << vpViper650::CONST_CAMERA_FILENAME << "\"" << std::endl; + << std::endl + << "from the XML file: \"" << vpViper650::CONST_CAMERA_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpViper650::CONST_CAMERA_FILENAME, vpViper650::CONST_MARLIN_F033C_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -568,8 +569,8 @@ void vpViper650::getCameraParameters(vpCameraParameters &cam, const unsigned int } case vpViper650::TOOL_PTGREY_FLEA2_CAMERA: { std::cout << "Get camera parameters for camera \"" << vpViper650::CONST_PTGREY_FLEA2_CAMERA_NAME << "\"" - << std::endl - << "from the XML file: \"" << vpViper650::CONST_CAMERA_FILENAME << "\"" << std::endl; + << std::endl + << "from the XML file: \"" << vpViper650::CONST_CAMERA_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpViper650::CONST_CAMERA_FILENAME, vpViper650::CONST_PTGREY_FLEA2_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -578,8 +579,8 @@ void vpViper650::getCameraParameters(vpCameraParameters &cam, const unsigned int } case vpViper650::TOOL_SCHUNK_GRIPPER_CAMERA: { std::cout << "Get camera parameters for camera \"" << 
vpViper650::CONST_SCHUNK_GRIPPER_CAMERA_NAME << "\"" - << std::endl - << "from the XML file: \"" << vpViper650::CONST_CAMERA_FILENAME << "\"" << std::endl; + << std::endl + << "from the XML file: \"" << vpViper650::CONST_CAMERA_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpViper650::CONST_CAMERA_FILENAME, vpViper650::CONST_SCHUNK_GRIPPER_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -588,7 +589,7 @@ void vpViper650::getCameraParameters(vpCameraParameters &cam, const unsigned int } case vpViper650::TOOL_GENERIC_CAMERA: { std::cout << "Get camera parameters for camera \"" << vpViper650::CONST_GENERIC_CAMERA_NAME << "\"" << std::endl - << "from the XML file: \"" << vpViper650::CONST_CAMERA_FILENAME << "\"" << std::endl; + << "from the XML file: \"" << vpViper650::CONST_CAMERA_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpViper650::CONST_CAMERA_FILENAME, vpViper650::CONST_GENERIC_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -618,7 +619,7 @@ void vpViper650::getCameraParameters(vpCameraParameters &cam, const unsigned int // Set default intrinsic camera parameters for 640x480 images if (image_width == 640 && image_height == 480) { std::cout << "Get default camera parameters for camera \"" << vpViper650::CONST_MARLIN_F033C_CAMERA_NAME << "\"" - << std::endl; + << std::endl; switch (this->projModel) { case vpCameraParameters::perspectiveProjWithoutDistortion: cam.initPersProjWithoutDistortion(1232.0, 1233.0, 317.7, 253.9); @@ -631,7 +632,8 @@ void vpViper650::getCameraParameters(vpCameraParameters &cam, const unsigned int "Feature getCameraParameters is not implemented for Kannala-Brandt projection model yet."); break; } - } else { + } + else { vpTRACE("Cannot get default intrinsic camera parameters for this image " "resolution"); throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -643,7 +645,7 @@ void vpViper650::getCameraParameters(vpCameraParameters &cam, const unsigned int // Set default intrinsic camera parameters for 640x480 images if (image_width == 640 && image_height == 480) { std::cout << "Get default camera parameters for camera \"" << vpViper650::CONST_PTGREY_FLEA2_CAMERA_NAME << "\"" - << std::endl; + << std::endl; switch (this->projModel) { case vpCameraParameters::perspectiveProjWithoutDistortion: cam.initPersProjWithoutDistortion(868.0, 869.0, 314.8, 254.1); @@ -656,7 +658,8 @@ void vpViper650::getCameraParameters(vpCameraParameters &cam, const unsigned int "Feature getCameraParameters is not implemented for Kannala-Brandt projection model yet."); break; } - } else { + } + else { vpTRACE("Cannot get default intrinsic camera parameters for this image " "resolution"); throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -667,7 +670,7 @@ void vpViper650::getCameraParameters(vpCameraParameters &cam, const unsigned int // Set default intrinsic camera parameters for 640x480 images if (image_width == 640 && image_height == 480) { std::cout << "Get default camera parameters for camera \"" << vpViper650::CONST_GENERIC_CAMERA_NAME << "\"" - << std::endl; + << std::endl; switch (this->projModel) { case 
vpCameraParameters::perspectiveProjWithoutDistortion: cam.initPersProjWithoutDistortion(868.0, 869.0, 314.8, 254.1); @@ -680,7 +683,8 @@ void vpViper650::getCameraParameters(vpCameraParameters &cam, const unsigned int "Feature getCameraParameters is not implemented for Kannala-Brandt projection model yet."); break; } - } else { + } + else { vpTRACE("Cannot get default intrinsic camera parameters for this image " "resolution"); throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); diff --git a/modules/robot/src/real-robot/viper/vpViper850.cpp b/modules/robot/src/real-robot/viper/vpViper850.cpp index d5951a1714..bce6df6ac4 100644 --- a/modules/robot/src/real-robot/viper/vpViper850.cpp +++ b/modules/robot/src/real-robot/viper/vpViper850.cpp @@ -46,40 +46,40 @@ #include #include -static const char *opt_viper850[] = {"CAMERA", "eMc_ROT_XYZ", "eMc_TRANS_XYZ", nullptr}; +static const char *opt_viper850[] = { "CAMERA", "eMc_ROT_XYZ", "eMc_TRANS_XYZ", nullptr }; #ifdef VISP_HAVE_VIPER850_DATA const std::string vpViper850::CONST_EMC_MARLIN_F033C_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_VIPER850_DATA_PATH) + - std::string("/include/const_eMc_MarlinF033C_without_distortion_Viper850.cnf"); +std::string(VISP_VIPER850_DATA_PATH) + +std::string("/include/const_eMc_MarlinF033C_without_distortion_Viper850.cnf"); const std::string vpViper850::CONST_EMC_MARLIN_F033C_WITH_DISTORTION_FILENAME = - std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/const_eMc_MarlinF033C_with_distortion_Viper850.cnf"); +std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/const_eMc_MarlinF033C_with_distortion_Viper850.cnf"); const std::string vpViper850::CONST_EMC_PTGREY_FLEA2_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_VIPER850_DATA_PATH) + - std::string("/include/const_eMc_PTGreyFlea2_without_distortion_Viper850.cnf"); +std::string(VISP_VIPER850_DATA_PATH) + +std::string("/include/const_eMc_PTGreyFlea2_without_distortion_Viper850.cnf"); const std::string vpViper850::CONST_EMC_PTGREY_FLEA2_WITH_DISTORTION_FILENAME = - std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/const_eMc_PTGreyFlea2_with_distortion_Viper850.cnf"); +std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/const_eMc_PTGreyFlea2_with_distortion_Viper850.cnf"); const std::string vpViper850::CONST_EMC_SCHUNK_GRIPPER_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/" - "const_eMc_schunk_gripper_without_distortion_Viper850." - "cnf"); +std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/" + "const_eMc_schunk_gripper_without_distortion_Viper850." 
+ "cnf"); const std::string vpViper850::CONST_EMC_SCHUNK_GRIPPER_WITH_DISTORTION_FILENAME = - std::string(VISP_VIPER850_DATA_PATH) + - std::string("/include/const_eMc_schunk_gripper_with_distortion_Viper850.cnf"); +std::string(VISP_VIPER850_DATA_PATH) + +std::string("/include/const_eMc_schunk_gripper_with_distortion_Viper850.cnf"); const std::string vpViper850::CONST_EMC_GENERIC_WITHOUT_DISTORTION_FILENAME = - std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/const_eMc_generic_without_distortion_Viper850.cnf"); +std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/const_eMc_generic_without_distortion_Viper850.cnf"); const std::string vpViper850::CONST_EMC_GENERIC_WITH_DISTORTION_FILENAME = - std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/const_eMc_generic_with_distortion_Viper850.cnf"); +std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/const_eMc_generic_with_distortion_Viper850.cnf"); const std::string vpViper850::CONST_CAMERA_FILENAME = - std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/const_camera_Viper850.xml"); +std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/const_camera_Viper850.xml"); #endif // VISP_HAVE_VIPER850_DATA @@ -475,7 +475,8 @@ void vpViper850::parseConfigFile(const std::string &filename) // Compute the eMc matrix from the translations and rotations if (get_etc && get_erc) { this->set_eMc(etc_, erc_); - } else { + } + else { throw vpRobotException(vpRobotException::readingParametersError, "Could not read translation and rotation " "parameters from config file %s", @@ -555,13 +556,13 @@ parameters are not found. void vpViper850::getCameraParameters(vpCameraParameters &cam, const unsigned int &image_width, const unsigned int &image_height) const { -#if defined(VISP_HAVE_VIPER850_DATA) +#if defined(VISP_HAVE_VIPER850_DATA) && defined(VISP_HAVE_PUGIXML) vpXmlParserCamera parser; switch (getToolType()) { case vpViper850::TOOL_MARLIN_F033C_CAMERA: { std::cout << "Get camera parameters for camera \"" << vpViper850::CONST_MARLIN_F033C_CAMERA_NAME << "\"" - << std::endl - << "from the XML file: \"" << vpViper850::CONST_CAMERA_FILENAME << "\"" << std::endl; + << std::endl + << "from the XML file: \"" << vpViper850::CONST_CAMERA_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpViper850::CONST_CAMERA_FILENAME, vpViper850::CONST_MARLIN_F033C_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -570,8 +571,8 @@ void vpViper850::getCameraParameters(vpCameraParameters &cam, const unsigned int } case vpViper850::TOOL_PTGREY_FLEA2_CAMERA: { std::cout << "Get camera parameters for camera \"" << vpViper850::CONST_PTGREY_FLEA2_CAMERA_NAME << "\"" - << std::endl - << "from the XML file: \"" << vpViper850::CONST_CAMERA_FILENAME << "\"" << std::endl; + << std::endl + << "from the XML file: \"" << vpViper850::CONST_CAMERA_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpViper850::CONST_CAMERA_FILENAME, vpViper850::CONST_PTGREY_FLEA2_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -580,8 +581,8 @@ void vpViper850::getCameraParameters(vpCameraParameters &cam, const unsigned int } case vpViper850::TOOL_SCHUNK_GRIPPER_CAMERA: { std::cout << "Get camera parameters for camera \"" << 
vpViper850::CONST_SCHUNK_GRIPPER_CAMERA_NAME << "\"" - << std::endl - << "from the XML file: \"" << vpViper850::CONST_CAMERA_FILENAME << "\"" << std::endl; + << std::endl + << "from the XML file: \"" << vpViper850::CONST_CAMERA_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpViper850::CONST_CAMERA_FILENAME, vpViper850::CONST_SCHUNK_GRIPPER_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -590,7 +591,7 @@ void vpViper850::getCameraParameters(vpCameraParameters &cam, const unsigned int } case vpViper850::TOOL_GENERIC_CAMERA: { std::cout << "Get camera parameters for camera \"" << vpViper850::CONST_GENERIC_CAMERA_NAME << "\"" << std::endl - << "from the XML file: \"" << vpViper850::CONST_CAMERA_FILENAME << "\"" << std::endl; + << "from the XML file: \"" << vpViper850::CONST_CAMERA_FILENAME << "\"" << std::endl; if (parser.parse(cam, vpViper850::CONST_CAMERA_FILENAME, vpViper850::CONST_GENERIC_CAMERA_NAME, projModel, image_width, image_height) != vpXmlParserCamera::SEQUENCE_OK) { throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -620,7 +621,7 @@ void vpViper850::getCameraParameters(vpCameraParameters &cam, const unsigned int // Set default intrinsic camera parameters for 640x480 images if (image_width == 640 && image_height == 480) { std::cout << "Get default camera parameters for camera \"" << vpViper850::CONST_MARLIN_F033C_CAMERA_NAME << "\"" - << std::endl; + << std::endl; switch (this->projModel) { case vpCameraParameters::perspectiveProjWithoutDistortion: cam.initPersProjWithoutDistortion(1232.0, 1233.0, 317.7, 253.9); @@ -633,7 +634,8 @@ void vpViper850::getCameraParameters(vpCameraParameters &cam, const unsigned int "Feature getCameraParameters is not implemented for Kannala-Brandt projection model yet."); break; } - } else { + } + else { vpTRACE("Cannot get default intrinsic camera parameters for this image " "resolution"); throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -645,7 +647,7 @@ void vpViper850::getCameraParameters(vpCameraParameters &cam, const unsigned int // Set default intrinsic camera parameters for 640x480 images if (image_width == 640 && image_height == 480) { std::cout << "Get default camera parameters for camera \"" << vpViper850::CONST_PTGREY_FLEA2_CAMERA_NAME << "\"" - << std::endl; + << std::endl; switch (this->projModel) { case vpCameraParameters::perspectiveProjWithoutDistortion: cam.initPersProjWithoutDistortion(868.0, 869.0, 314.8, 254.1); @@ -658,7 +660,8 @@ void vpViper850::getCameraParameters(vpCameraParameters &cam, const unsigned int "Feature getCameraParameters is not implemented for Kannala-Brandt projection model yet."); break; } - } else { + } + else { vpTRACE("Cannot get default intrinsic camera parameters for this image " "resolution"); throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); @@ -669,7 +672,7 @@ void vpViper850::getCameraParameters(vpCameraParameters &cam, const unsigned int // Set default intrinsic camera parameters for 640x480 images if (image_width == 640 && image_height == 480) { std::cout << "Get default camera parameters for camera \"" << vpViper850::CONST_GENERIC_CAMERA_NAME << "\"" - << std::endl; + << std::endl; switch (this->projModel) { case 
vpCameraParameters::perspectiveProjWithoutDistortion: cam.initPersProjWithoutDistortion(868.0, 869.0, 314.8, 254.1); @@ -682,7 +685,8 @@ void vpViper850::getCameraParameters(vpCameraParameters &cam, const unsigned int "Feature getCameraParameters is not implemented for Kannala-Brandt projection model yet."); break; } - } else { + } + else { vpTRACE("Cannot get default intrinsic camera parameters for this image " "resolution"); throw vpRobotException(vpRobotException::readingParametersError, "Impossible to read the camera parameters."); diff --git a/modules/sensor/src/rgb-depth/kinect/vpKinect.cpp b/modules/sensor/src/rgb-depth/kinect/vpKinect.cpp index 79af83887b..6666baaa81 100644 --- a/modules/sensor/src/rgb-depth/kinect/vpKinect.cpp +++ b/modules/sensor/src/rgb-depth/kinect/vpKinect.cpp @@ -52,8 +52,8 @@ */ vpKinect::vpKinect(freenect_context *ctx, int index) : Freenect::FreenectDevice(ctx, index), m_rgb_mutex(), m_depth_mutex(), RGBcam(), IRcam(), rgbMir(), irMrgb(), - DMres(DMAP_LOW_RES), hd(240), wd(320), dmap(), IRGB(), m_new_rgb_frame(false), m_new_depth_map(false), - m_new_depth_image(false), height(480), width(640) + DMres(DMAP_LOW_RES), hd(240), wd(320), dmap(), IRGB(), m_new_rgb_frame(false), m_new_depth_map(false), + m_new_depth_image(false), height(480), width(640) { dmap.resize(height, width); IRGB.resize(height, width); @@ -68,7 +68,7 @@ vpKinect::vpKinect(freenect_context *ctx, int index) /*! Destructor. */ -vpKinect::~vpKinect() {} +vpKinect::~vpKinect() { } void vpKinect::start(vpKinect::vpDMResolution res) { @@ -82,7 +82,8 @@ void vpKinect::start(vpKinect::vpDMResolution res) IRcam.initPersProjWithDistortion(303.06, 297.89, 160.75, 117.9, -0.27, 0); hd = 240; wd = 320; - } else { + } + else { std::cout << "vpKinect::start MEDIUM depth map resolution 480x640" << std::endl; IRcam.initPersProjWithDistortion(606.12, 595.78, 321.5, 235.8, -0.27, 0); @@ -91,7 +92,7 @@ void vpKinect::start(vpKinect::vpDMResolution res) wd = 640; } -#if defined(VISP_HAVE_VIPER850_DATA) +#if defined(VISP_HAVE_VIPER850_DATA) && defined(VISP_HAVE_PUGIXML) vpXmlParserCamera cameraParser; std::string cameraXmlFile = std::string(VISP_VIPER850_DATA_PATH) + std::string("/include/const_camera_Viper850.xml"); cameraParser.parse(RGBcam, cameraXmlFile, "Generic-camera", vpCameraParameters::perspectiveProjWithDistortion, width, @@ -150,8 +151,8 @@ void vpKinect::DepthCallback(void *depth, uint32_t /* timestamp */) for (unsigned i = 0; i < height; i++) { for (unsigned j = 0; j < width; j++) { dmap[i][j] = - 0.1236f * tan(depth_[width * i + j] / 2842.5f + 1.1863f); // formula from - // http://openkinect.org/wiki/Imaging_Information + 0.1236f * tan(depth_[width * i + j] / 2842.5f + 1.1863f); // formula from + // http://openkinect.org/wiki/Imaging_Information if (depth_[width * i + j] > 1023) { // Depth cannot be computed dmap[i][j] = -1; } @@ -203,7 +204,8 @@ bool vpKinect::getDepthMap(vpImage &map, vpImage &Imap) else Imap[i][j] = 255; } - } else { + } + else { for (unsigned i = 0; i < height; i++) for (unsigned j = 0; j < width; j++) { map[i][j] = tempMap[i][j]; @@ -239,7 +241,8 @@ void vpKinect::warpRGBFrame(const vpImage &Irgb, const vpImage &I { if ((Idepth.getHeight() != hd) || (Idepth.getWidth() != wd)) { vpERROR_TRACE(1, "Idepth image size does not match vpKinect DM resolution"); - } else { + } + else { if ((IrgbWarped.getHeight() != hd) || (IrgbWarped.getWidth() != wd)) IrgbWarped.resize(hd, wd); IrgbWarped = 0; @@ -267,7 +270,8 @@ void vpKinect::warpRGBFrame(const vpImage &Irgb, const vpImage &I if 
(std::fabs(Z2) > std::numeric_limits::epsilon()) { x2 = P2[0] / P2[2]; y2 = P2[1] / P2[2]; - } else + } + else std::cout << "Z2 = 0 !!" << std::endl; //! compute pixel coordinates of the corresponding point in the @@ -279,7 +283,8 @@ void vpKinect::warpRGBFrame(const vpImage &Irgb, const vpImage &I //! Fill warped image value if ((u_ < width) && (v_ < height)) { IrgbWarped[i][j] = Irgb[v_][u_]; - } else + } + else IrgbWarped[i][j] = 0; } } @@ -289,5 +294,5 @@ void vpKinect::warpRGBFrame(const vpImage &Irgb, const vpImage &I #elif !defined(VISP_BUILD_SHARED_LIBS) // Work around to avoid warning: libvisp_sensor.a(vpKinect.cpp.o) has no // symbols -void dummy_vpKinect(){}; +void dummy_vpKinect() { }; #endif // VISP_HAVE_LIBFREENECT diff --git a/modules/tracker/mbt/CMakeLists.txt b/modules/tracker/mbt/CMakeLists.txt index c5e5e7cb72..9b87e7c378 100644 --- a/modules/tracker/mbt/CMakeLists.txt +++ b/modules/tracker/mbt/CMakeLists.txt @@ -36,6 +36,7 @@ # Add optional 3rd parties set(opt_incs "") set(opt_libs "") +set(opt_libs_private "") if(USE_COIN3D AND NOT HAVE_visp_ar) if(WIN32) @@ -122,22 +123,26 @@ endif() if(WITH_CLIPPER) # clipper is private include_directories(${CLIPPER_INCLUDE_DIRS}) + list(APPEND opt_libs_private ${CLIPPER_LIBRARIES}) endif() # pugixml is always enabled to provide default XML I/O capabilities # pugixml is private include_directories(${PUGIXML_INCLUDE_DIRS}) +list(APPEND opt_libs_private ${PUGIXML_LIBRARIES}) if(WITH_CATCH2) # catch2 is private include_directories(${CATCH2_INCLUDE_DIRS}) endif() -# simdlib is always enabled since it contains fallback code to plain C++ code -# Simd lib is private -include_directories(${SIMDLIB_INCLUDE_DIRS}) +if(WITH_SIMDLIB) + # Simd lib is private + include_directories(${SIMDLIB_INCLUDE_DIRS}) + list(APPEND opt_libs_private ${SIMDLIB_LIBRARIES}) +endif() -vp_add_module(mbt visp_vision visp_core visp_me visp_visual_features OPTIONAL visp_ar visp_klt visp_gui PRIVATE_OPTIONAL ${CLIPPER_LIBRARIES} ${PUGIXML_LIBRARIES} ${SIMDLIB_LIBRARIES} WRAP java) +vp_add_module(mbt visp_vision visp_core visp_me visp_visual_features OPTIONAL visp_ar visp_klt visp_gui PRIVATE_OPTIONAL ${opt_libs_private} WRAP java) vp_glob_module_sources() if(USE_OGRE) diff --git a/modules/tracker/mbt/include/visp3/mbt/vpMbtXmlGenericParser.h b/modules/tracker/mbt/include/visp3/mbt/vpMbtXmlGenericParser.h index fff258040e..e9de44f18e 100644 --- a/modules/tracker/mbt/include/visp3/mbt/vpMbtXmlGenericParser.h +++ b/modules/tracker/mbt/include/visp3/mbt/vpMbtXmlGenericParser.h @@ -44,6 +44,7 @@ #include +#if defined(VISP_HAVE_PUGIXML) #include #include #include @@ -60,7 +61,8 @@ class VISP_EXPORT vpMbtXmlGenericParser { public: - enum vpParserType { + enum vpParserType + { EDGE_PARSER = 1 << 0, /*!< Parser for model-based tracking using moving edges features. */ KLT_PARSER = 1 << 1, /*!< Parser for model-based tracking using KLT features. 
*/ @@ -164,3 +166,4 @@ class VISP_EXPORT vpMbtXmlGenericParser }; #endif +#endif diff --git a/modules/tracker/mbt/src/depth/vpMbDepthDenseTracker.cpp b/modules/tracker/mbt/src/depth/vpMbDepthDenseTracker.cpp index ca74cdc202..a90fa06cd6 100644 --- a/modules/tracker/mbt/src/depth/vpMbDepthDenseTracker.cpp +++ b/modules/tracker/mbt/src/depth/vpMbDepthDenseTracker.cpp @@ -54,11 +54,11 @@ vpMbDepthDenseTracker::vpMbDepthDenseTracker() : m_depthDenseHiddenFacesDisplay(), m_depthDenseListOfActiveFaces(), m_denseDepthNbFeatures(0), m_depthDenseFaces(), - m_depthDenseSamplingStepX(2), m_depthDenseSamplingStepY(2), m_error_depthDense(), m_L_depthDense(), - m_robust_depthDense(), m_w_depthDense(), m_weightedError_depthDense() + m_depthDenseSamplingStepX(2), m_depthDenseSamplingStepY(2), m_error_depthDense(), m_L_depthDense(), + m_robust_depthDense(), m_w_depthDense(), m_weightedError_depthDense() #if DEBUG_DISPLAY_DEPTH_DENSE - , - m_debugDisp_depthDense(nullptr), m_debugImage_depthDense() + , + m_debugDisp_depthDense(nullptr), m_debugImage_depthDense() #endif { #ifdef VISP_HAVE_OGRE @@ -287,7 +287,7 @@ void vpMbDepthDenseTracker::display(const vpImage &I, const vpHom bool displayFullModel) { std::vector > models = - vpMbDepthDenseTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); + vpMbDepthDenseTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); for (size_t i = 0; i < models.size(); i++) { if (vpMath::equal(models[i][0], 0)) { @@ -303,7 +303,7 @@ void vpMbDepthDenseTracker::display(const vpImage &I, const vpHomogeneou bool displayFullModel) { std::vector > models = - vpMbDepthDenseTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); + vpMbDepthDenseTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); for (size_t i = 0; i < models.size(); i++) { if (vpMath::equal(models[i][0], 0)) { @@ -352,7 +352,7 @@ std::vector > vpMbDepthDenseTracker::getModelForDisplay(unsi ++it) { vpMbtFaceDepthDense *face_dense = *it; std::vector > modelLines = - face_dense->getModelForDisplay(width, height, cMo, cam, displayFullModel); + face_dense->getModelForDisplay(width, height, cMo, cam, displayFullModel); models.insert(models.end(), modelLines.begin(), modelLines.end()); } @@ -368,7 +368,8 @@ void vpMbDepthDenseTracker::init(const vpImage &I) bool reInitialisation = false; if (!useOgre) { faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation); - } else { + } + else { #ifdef VISP_HAVE_OGRE if (!faces.isOgreInitialised()) { faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth()); @@ -394,6 +395,7 @@ void vpMbDepthDenseTracker::init(const vpImage &I) void vpMbDepthDenseTracker::loadConfigFile(const std::string &configFile, bool verbose) { +#if defined(VISP_HAVE_PUGIXML) vpMbtXmlGenericParser xmlp(vpMbtXmlGenericParser::DEPTH_DENSE_PARSER); xmlp.setVerbose(verbose); xmlp.setCameraParameters(m_cam); @@ -408,7 +410,8 @@ void vpMbDepthDenseTracker::loadConfigFile(const std::string &configFile, bool v std::cout << " *********** Parsing XML for Mb Depth Dense Tracker ************ " << std::endl; } xmlp.parse(configFile); - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cerr << "Exception: " << e.what() << std::endl; throw vpException(vpException::ioError, "Cannot open XML file \"%s\"", configFile.c_str()); } @@ -430,6 +433,11 @@ void vpMbDepthDenseTracker::loadConfigFile(const std::string &configFile, bool v 
setClipping(clippingFlag | vpPolygon3D::FOV_CLIPPING); setDepthDenseSamplingStep(xmlp.getDepthDenseSamplingStepX(), xmlp.getDepthDenseSamplingStepY()); +#else + (void)configFile; + (void)verbose; + throw(vpException(vpException::ioError, "vpMbDepthDenseTracker::loadConfigFile() needs pugixml built-in 3rdparty")); +#endif } void vpMbDepthDenseTracker::reInitModel(const vpImage &I, const std::string &cad_name, @@ -710,7 +718,7 @@ void vpMbDepthDenseTracker::setUseDepthDenseTracking(const std::string &name, co } } -void vpMbDepthDenseTracker::testTracking() {} +void vpMbDepthDenseTracker::testTracking() { } void vpMbDepthDenseTracker::track(const vpImage &) { diff --git a/modules/tracker/mbt/src/depth/vpMbDepthNormalTracker.cpp b/modules/tracker/mbt/src/depth/vpMbDepthNormalTracker.cpp index 0901245fca..ee0ab6c2a9 100644 --- a/modules/tracker/mbt/src/depth/vpMbDepthNormalTracker.cpp +++ b/modules/tracker/mbt/src/depth/vpMbDepthNormalTracker.cpp @@ -54,14 +54,14 @@ vpMbDepthNormalTracker::vpMbDepthNormalTracker() : m_depthNormalFeatureEstimationMethod(vpMbtFaceDepthNormal::ROBUST_FEATURE_ESTIMATION), - m_depthNormalHiddenFacesDisplay(), m_depthNormalListOfActiveFaces(), m_depthNormalListOfDesiredFeatures(), - m_depthNormalFaces(), m_depthNormalPclPlaneEstimationMethod(2), m_depthNormalPclPlaneEstimationRansacMaxIter(200), - m_depthNormalPclPlaneEstimationRansacThreshold(0.001), m_depthNormalSamplingStepX(2), m_depthNormalSamplingStepY(2), - m_depthNormalUseRobust(false), m_error_depthNormal(), m_featuresToBeDisplayedDepthNormal(), m_L_depthNormal(), - m_robust_depthNormal(), m_w_depthNormal(), m_weightedError_depthNormal() + m_depthNormalHiddenFacesDisplay(), m_depthNormalListOfActiveFaces(), m_depthNormalListOfDesiredFeatures(), + m_depthNormalFaces(), m_depthNormalPclPlaneEstimationMethod(2), m_depthNormalPclPlaneEstimationRansacMaxIter(200), + m_depthNormalPclPlaneEstimationRansacThreshold(0.001), m_depthNormalSamplingStepX(2), m_depthNormalSamplingStepY(2), + m_depthNormalUseRobust(false), m_error_depthNormal(), m_featuresToBeDisplayedDepthNormal(), m_L_depthNormal(), + m_robust_depthNormal(), m_w_depthNormal(), m_weightedError_depthNormal() #if DEBUG_DISPLAY_DEPTH_NORMAL - , - m_debugDisp_depthNormal(nullptr), m_debugImage_depthNormal() + , + m_debugDisp_depthNormal(nullptr), m_debugImage_depthNormal() #endif { #ifdef VISP_HAVE_OGRE @@ -282,7 +282,7 @@ void vpMbDepthNormalTracker::display(const vpImage &I, const vpHo bool displayFullModel) { std::vector > models = - vpMbDepthNormalTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); + vpMbDepthNormalTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); for (size_t i = 0; i < models.size(); i++) { if (vpMath::equal(models[i][0], 0)) { @@ -308,7 +308,7 @@ void vpMbDepthNormalTracker::display(const vpImage &I, const vpHomogeneo bool displayFullModel) { std::vector > models = - vpMbDepthNormalTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); + vpMbDepthNormalTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); for (size_t i = 0; i < models.size(); i++) { if (vpMath::equal(models[i][0], 0)) { @@ -381,7 +381,7 @@ std::vector > vpMbDepthNormalTracker::getModelForDisplay(uns it != m_depthNormalFaces.end(); ++it) { vpMbtFaceDepthNormal *face_normal = *it; std::vector > modelLines = - face_normal->getModelForDisplay(width, height, cMo, cam, displayFullModel); + face_normal->getModelForDisplay(width, height, cMo, cam, 
displayFullModel); models.insert(models.end(), modelLines.begin(), modelLines.end()); } @@ -397,7 +397,8 @@ void vpMbDepthNormalTracker::init(const vpImage &I) bool reInitialisation = false; if (!useOgre) { faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation); - } else { + } + else { #ifdef VISP_HAVE_OGRE if (!faces.isOgreInitialised()) { faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth()); @@ -423,6 +424,7 @@ void vpMbDepthNormalTracker::init(const vpImage &I) void vpMbDepthNormalTracker::loadConfigFile(const std::string &configFile, bool verbose) { +#if defined(VISP_HAVE_PUGIXML) vpMbtXmlGenericParser xmlp(vpMbtXmlGenericParser::DEPTH_NORMAL_PARSER); xmlp.setVerbose(verbose); xmlp.setCameraParameters(m_cam); @@ -441,7 +443,8 @@ void vpMbDepthNormalTracker::loadConfigFile(const std::string &configFile, bool std::cout << " *********** Parsing XML for Mb Depth Tracker ************ " << std::endl; } xmlp.parse(configFile); - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cerr << "Exception: " << e.what() << std::endl; throw vpException(vpException::ioError, "Cannot open XML file \"%s\"", configFile.c_str()); } @@ -467,6 +470,11 @@ void vpMbDepthNormalTracker::loadConfigFile(const std::string &configFile, bool setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter()); setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold()); setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY()); +#else + (void)configFile; + (void)verbose; + throw(vpException(vpException::ioError, "vpMbDepthNormalTracker::loadConfigFile() needs pugixml built-in 3rdparty")); +#endif } void vpMbDepthNormalTracker::reInitModel(const vpImage &I, const std::string &cad_name, @@ -584,7 +592,7 @@ void vpMbDepthNormalTracker::setUseDepthNormalTracking(const std::string &name, } } -void vpMbDepthNormalTracker::testTracking() {} +void vpMbDepthNormalTracker::testTracking() { } #ifdef VISP_HAVE_PCL void vpMbDepthNormalTracker::segmentPointCloud(const pcl::PointCloud::ConstPtr &point_cloud) diff --git a/modules/tracker/mbt/src/edge/vpMbEdgeTracker.cpp b/modules/tracker/mbt/src/edge/vpMbEdgeTracker.cpp index 663fa223f0..f55f12ab1b 100644 --- a/modules/tracker/mbt/src/edge/vpMbEdgeTracker.cpp +++ b/modules/tracker/mbt/src/edge/vpMbEdgeTracker.cpp @@ -63,10 +63,10 @@ */ vpMbEdgeTracker::vpMbEdgeTracker() : me(), lines(1), circles(1), cylinders(1), nline(0), ncircle(0), ncylinder(0), nbvisiblepolygone(0), - percentageGdPt(0.4), scales(1), Ipyramid(0), scaleLevel(0), nbFeaturesForProjErrorComputation(0), m_factor(), - m_robustLines(), m_robustCylinders(), m_robustCircles(), m_wLines(), m_wCylinders(), m_wCircles(), m_errorLines(), - m_errorCylinders(), m_errorCircles(), m_L_edge(), m_error_edge(), m_w_edge(), m_weightedError_edge(), - m_robust_edge(), m_featuresToBeDisplayedEdge() + percentageGdPt(0.4), scales(1), Ipyramid(0), scaleLevel(0), nbFeaturesForProjErrorComputation(0), m_factor(), + m_robustLines(), m_robustCylinders(), m_robustCircles(), m_wLines(), m_wCylinders(), m_wCircles(), m_errorLines(), + m_errorCylinders(), m_errorCircles(), m_L_edge(), m_error_edge(), m_w_edge(), m_weightedError_edge(), + m_robust_edge(), m_featuresToBeDisplayedEdge() { scales[0] = true; @@ -251,7 +251,8 @@ void vpMbEdgeTracker::computeVVS(const vpImage &_I, unsigned int m_L_edge[i][j] = wi * m_L_edge[i][j]; } } - } else { + } + else {
for (unsigned int i = 0; i < nbrow; i++) { wi = m_w_edge[i] * m_factor[i]; W_true[i] = wi; @@ -354,7 +355,8 @@ void vpMbEdgeTracker::computeVVSFirstPhase(const vpImage &_I, uns m_w_edge[n + i] = 1 /*0.5*/; } e_prev = e_cur; - } else + } + else m_w_edge[n + i] = 1; } @@ -415,7 +417,8 @@ void vpMbEdgeTracker::computeVVSFirstPhase(const vpImage &_I, uns if (i < cy->nbFeaturel1) { site = *itCyl1; ++itCyl1; - } else { + } + else { site = *itCyl2; ++itCyl2; } @@ -432,7 +435,8 @@ void vpMbEdgeTracker::computeVVSFirstPhase(const vpImage &_I, uns m_w_edge[n + i] = 1 /*0.5*/; } e_prev = e_cur; - } else + } + else m_w_edge[n + i] = 1; } if (i == cy->nbFeaturel1) { @@ -443,7 +447,8 @@ void vpMbEdgeTracker::computeVVSFirstPhase(const vpImage &_I, uns m_w_edge[n + i] = 1 /*0.5*/; } e_prev = e_cur; - } else + } + else m_w_edge[n + i] = 1; } @@ -518,7 +523,8 @@ void vpMbEdgeTracker::computeVVSFirstPhase(const vpImage &_I, uns m_w_edge[n + i] = 1 /*0.5*/; } e_prev = e_cur; - } else + } + else m_w_edge[n + i] = 1; } @@ -611,7 +617,8 @@ void vpMbEdgeTracker::computeVVSFirstPhaseFactor(const vpImage &I if (i < cy->nbFeaturel1) { site = *itCyl1; ++itCyl1; - } else { + } + else { site = *itCyl2; ++itCyl2; } @@ -662,7 +669,8 @@ void vpMbEdgeTracker::computeVVSFirstPhasePoseEstimation(unsigned int iter, bool m_L_edge[i][j] = wi * m_L_edge[i][j]; } } - } else { + } + else { for (unsigned int i = 0; i < nerror; i++) { wi = m_w_edge[i] * m_factor[i]; eri = m_error_edge[i]; @@ -702,7 +710,8 @@ void vpMbEdgeTracker::computeVVSFirstPhasePoseEstimation(unsigned int iter, bool LTL = m_L_edge.AtA(); computeJTR(m_L_edge, m_weightedError_edge, LTR); v = -0.7 * LTL.pseudoInverse(LTL.getRows() * std::numeric_limits::epsilon()) * LTR; - } else { + } + else { cVo.buildFrom(m_cMo); vpMatrix LVJ = (m_L_edge * cVo * oJo); vpMatrix LVJTLVJ = (LVJ).AtA(); @@ -830,7 +839,7 @@ void vpMbEdgeTracker::computeVVSInteractionMatrixAndResidu(const vpImage 0) m_robustLines.MEstimator(vpRobust::TUKEY, m_errorLines, m_wLines); @@ -916,7 +925,8 @@ void vpMbEdgeTracker::computeProjectionError(const vpImage &_I) if (nbFeatures > 0) { projectionError = vpMath::deg(projectionError / (double)nbFeatures); - } else { + } + else { projectionError = 90.0; } @@ -1003,8 +1013,8 @@ void vpMbEdgeTracker::testTracking() if (nbGoodPoint < nb_min || nbExpectedPoint < 2) { std::ostringstream oss; oss << "Not enough moving edges (" << nbGoodPoint << ") to track the object: expected " << nb_min - << ". Try to reduce the threshold=" << percentageGdPt - << " using vpMbTracker::setGoodMovingEdgesRatioThreshold()"; + << ". Try to reduce the threshold=" << percentageGdPt + << " using vpMbTracker::setGoodMovingEdgesRatioThreshold()"; throw vpTrackingException(vpTrackingException::fatalError, oss.str()); } } @@ -1033,7 +1043,8 @@ void vpMbEdgeTracker::track(const vpImage &I) try { trackMovingEdge(*Ipyramid[lvl]); - } catch (...) { + } + catch (...) { vpTRACE("Error in moving edge tracking"); throw; } @@ -1064,7 +1075,8 @@ void vpMbEdgeTracker::track(const vpImage &I) try { computeVVS(*Ipyramid[lvl], lvl); - } catch (...) { + } + catch (...) 
{ covarianceMatrix = -1; throw; // throw the original exception } @@ -1095,12 +1107,14 @@ void vpMbEdgeTracker::track(const vpImage &I) computeProjectionError(I); upScale(lvl); - } catch (const vpException &e) { + } + catch (const vpException &e) { if (lvl != 0) { m_cMo = cMo_1; reInitLevel(lvl); upScale(lvl); - } else { + } + else { upScale(lvl); throw(e); } @@ -1214,7 +1228,8 @@ void vpMbEdgeTracker::setPose(const vpImage &I_color, const vpHomogeneou */ void vpMbEdgeTracker::loadConfigFile(const std::string &configFile, bool verbose) { - // Load projection error config +#if defined(VISP_HAVE_PUGIXML) +// Load projection error config vpMbTracker::loadConfigFile(configFile, verbose); vpMbtXmlGenericParser xmlp(vpMbtXmlGenericParser::EDGE_PARSER); @@ -1229,7 +1244,8 @@ void vpMbEdgeTracker::loadConfigFile(const std::string &configFile, bool verbose std::cout << " *********** Parsing XML for Mb Edge Tracker ************ " << std::endl; } xmlp.parse(configFile); - } catch (...) { + } + catch (...) { throw vpException(vpException::ioError, "Cannot open XML file \"%s\"", configFile.c_str()); } @@ -1263,6 +1279,11 @@ void vpMbEdgeTracker::loadConfigFile(const std::string &configFile, bool verbose setMinLineLengthThresh(minLineLengthThresholdGeneral); setMinPolygonAreaThresh(minPolygonAreaThresholdGeneral); } +#else + (void)configFile; + (void)verbose; + throw(vpException(vpException::ioError, "vpMbEdgeTracker::loadConfigFile() needs pugixml built-in 3rdparty")); +#endif } /*! @@ -1286,14 +1307,15 @@ void vpMbEdgeTracker::display(const vpImage &I, const vpHomogeneo } std::vector > models = - vpMbEdgeTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); + vpMbEdgeTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); for (size_t i = 0; i < models.size(); i++) { if (vpMath::equal(models[i][0], 0)) { vpImagePoint ip1(models[i][1], models[i][2]); vpImagePoint ip2(models[i][3], models[i][4]); vpDisplay::displayLine(I, ip1, ip2, col, thickness); - } else if (vpMath::equal(models[i][0], 1)) { + } + else if (vpMath::equal(models[i][0], 1)) { vpImagePoint center(models[i][1], models[i][2]); double n20 = models[i][3]; double n11 = models[i][4]; @@ -1328,14 +1350,15 @@ void vpMbEdgeTracker::display(const vpImage &I, const vpHomogeneousMatri } std::vector > models = - vpMbEdgeTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); + vpMbEdgeTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); for (size_t i = 0; i < models.size(); i++) { if (vpMath::equal(models[i][0], 0)) { vpImagePoint ip1(models[i][1], models[i][2]); vpImagePoint ip2(models[i][3], models[i][4]); vpDisplay::displayLine(I, ip1, ip2, col, thickness); - } else if (vpMath::equal(models[i][0], 1)) { + } + else if (vpMath::equal(models[i][0], 1)) { vpImagePoint center(models[i][1], models[i][2]); double n20 = models[i][3]; double n11 = models[i][4]; @@ -1410,14 +1433,14 @@ std::vector > vpMbEdgeTracker::getModelForDisplay(unsigned i for (std::list::const_iterator it = lines[scaleLevel].begin(); it != lines[scaleLevel].end(); ++it) { std::vector > currentModel = - (*it)->getModelForDisplay(width, height, cMo, cam, displayFullModel); + (*it)->getModelForDisplay(width, height, cMo, cam, displayFullModel); models.insert(models.end(), currentModel.begin(), currentModel.end()); } for (std::list::const_iterator it = cylinders[scaleLevel].begin(); it != cylinders[scaleLevel].end(); ++it) { std::vector > currentModel = - 
(*it)->getModelForDisplay(width, height, cMo, cam, displayFullModel); + (*it)->getModelForDisplay(width, height, cMo, cam, displayFullModel); models.insert(models.end(), currentModel.begin(), currentModel.end()); } @@ -1542,7 +1565,8 @@ void vpMbEdgeTracker::initMovingEdge(const vpImage &I, const vpHo l->updateTracked(); if (l->meline.empty() && l->isTracked()) l->initMovingEdge(I, _cMo, doNotTrack, m_mask); - } else { + } + else { l->setVisible(false); for (size_t a = 0; a < l->meline.size(); a++) { if (l->meline[a] != nullptr) @@ -1578,7 +1602,8 @@ void vpMbEdgeTracker::initMovingEdge(const vpImage &I, const vpHo if (cy->isTracked()) cy->initMovingEdge(I, _cMo, doNotTrack, m_mask); } - } else { + } + else { cy->setVisible(false); if (cy->meline1 != nullptr) delete cy->meline1; @@ -1611,7 +1636,8 @@ void vpMbEdgeTracker::initMovingEdge(const vpImage &I, const vpHo if (ci->isTracked()) ci->initMovingEdge(I, _cMo, doNotTrack, m_mask); } - } else { + } + else { ci->setVisible(false); if (ci->meEllipse != nullptr) delete ci->meEllipse; @@ -2059,7 +2085,7 @@ void vpMbEdgeTracker::addCircle(const vpPoint &P1, const vpPoint &P2, const vpPo if ((samePoint(*(ci->p1), P1) && samePoint(*(ci->p2), P2) && samePoint(*(ci->p3), P3)) || (samePoint(*(ci->p1), P1) && samePoint(*(ci->p2), P3) && samePoint(*(ci->p3), P2))) { already_here = - (std::fabs(ci->radius - r) < std::numeric_limits::epsilon() * vpMath::maximum(ci->radius, r)); + (std::fabs(ci->radius - r) < std::numeric_limits::epsilon() * vpMath::maximum(ci->radius, r)); } } @@ -2118,7 +2144,7 @@ void vpMbEdgeTracker::addCylinder(const vpPoint &P1, const vpPoint &P2, double r if ((samePoint(*(cy->p1), P1) && samePoint(*(cy->p2), P2)) || (samePoint(*(cy->p1), P2) && samePoint(*(cy->p2), P1))) { already_here = - (std::fabs(cy->radius - r) < std::numeric_limits::epsilon() * vpMath::maximum(cy->radius, r)); + (std::fabs(cy->radius - r) < std::numeric_limits::epsilon() * vpMath::maximum(cy->radius, r)); } } @@ -2220,7 +2246,8 @@ void vpMbEdgeTracker::visibleFace(const vpImage &I, const vpHomog // n = faces.setVisible(_I.getWidth(), I.getHeight(), m_cam, cMo, vpMath::rad(89), vpMath::rad(89), // changed); n = faces.setVisible(I.getWidth(), I.getHeight(), m_cam, cMo, angleAppears, angleDisappears, changed); - } else { + } + else { #ifdef VISP_HAVE_OGRE n = faces.setVisibleOgre(I.getWidth(), I.getHeight(), m_cam, cMo, angleAppears, angleDisappears, changed); #else @@ -2231,7 +2258,8 @@ void vpMbEdgeTracker::visibleFace(const vpImage &I, const vpHomog if (n > nbvisiblepolygone) { // cout << "une nouvelle face est visible " << endl; newvisibleline = true; - } else + } + else newvisibleline = false; nbvisiblepolygone = n; @@ -2518,9 +2546,9 @@ unsigned int vpMbEdgeTracker::getNbPoints(unsigned int level) const if (l->nbFeature[a] != 0) for (std::list::const_iterator itme = l->meline[a]->getMeList().begin(); itme != l->meline[a]->getMeList().end(); ++itme) { - if (itme->getState() == vpMeSite::NO_SUPPRESSION) - nbGoodPoints++; - } + if (itme->getState() == vpMeSite::NO_SUPPRESSION) + nbGoodPoints++; + } } } } @@ -2602,7 +2630,8 @@ void vpMbEdgeTracker::setScales(const std::vector &scale) circles.resize(1); circles[0].clear(); - } else { + } + else { this->scales = scale; lines.resize(scale.size()); @@ -2626,12 +2655,12 @@ void vpMbEdgeTracker::setFarClippingDistance(const double &dist) { if ((clippingFlag & vpPolygon3D::NEAR_CLIPPING) == vpPolygon3D::NEAR_CLIPPING && dist <= distNearClip) std::cerr << "Far clipping value cannot be inferior than near clipping " 
- "value. Far clipping won't be considered." - << std::endl; + "value. Far clipping won't be considered." + << std::endl; else if (dist < 0) std::cerr << "Far clipping value cannot be inferior than 0. Far clipping " - "won't be considered." - << std::endl; + "won't be considered." + << std::endl; else { vpMbTracker::setFarClippingDistance(dist); vpMbtDistanceLine *l; @@ -2656,12 +2685,12 @@ void vpMbEdgeTracker::setNearClippingDistance(const double &dist) { if ((clippingFlag & vpPolygon3D::FAR_CLIPPING) == vpPolygon3D::FAR_CLIPPING && dist >= distFarClip) std::cerr << "Near clipping value cannot be superior than far clipping " - "value. Near clipping won't be considered." - << std::endl; + "value. Near clipping won't be considered." + << std::endl; else if (dist < 0) std::cerr << "Near clipping value cannot be inferior than 0. Near " - "clipping won't be considered." - << std::endl; + "clipping won't be considered." + << std::endl; else { vpMbTracker::setNearClippingDistance(dist); vpMbtDistanceLine *l; @@ -2722,7 +2751,8 @@ void vpMbEdgeTracker::initPyramid(const vpImage &_I, if (scales[0]) { _pyramid[0] = &_I; - } else { + } + else { _pyramid[0] = nullptr; } @@ -2736,7 +2766,8 @@ void vpMbEdgeTracker::initPyramid(const vpImage &_I, } } _pyramid[i] = I; - } else { + } + else { _pyramid[i] = nullptr; } } diff --git a/modules/tracker/mbt/src/hybrid/vpMbEdgeKltTracker.cpp b/modules/tracker/mbt/src/hybrid/vpMbEdgeKltTracker.cpp index 911fc9692c..712c2c5703 100644 --- a/modules/tracker/mbt/src/hybrid/vpMbEdgeKltTracker.cpp +++ b/modules/tracker/mbt/src/hybrid/vpMbEdgeKltTracker.cpp @@ -273,7 +273,8 @@ unsigned int vpMbEdgeKltTracker::initMbtTracking(unsigned int lvl) */ void vpMbEdgeKltTracker::loadConfigFile(const std::string &configFile, bool verbose) { - // Load projection error config +#if defined(VISP_HAVE_PUGIXML) +// Load projection error config vpMbTracker::loadConfigFile(configFile, verbose); vpMbtXmlGenericParser xmlp(vpMbtXmlGenericParser::EDGE_PARSER | vpMbtXmlGenericParser::KLT_PARSER); @@ -348,6 +349,11 @@ void vpMbEdgeKltTracker::loadConfigFile(const std::string &configFile, bool verb // if(useScanLine) faces.getMbScanLineRenderer().setMaskBorder(maskBorder); +#else + (void)configFile; + (void)verbose; + throw(vpException(vpException::ioError, "vpMbEdgeKltTracker::loadConfigFile() needs pugixml built-in 3rdparty")); +#endif } /*! 
diff --git a/modules/tracker/mbt/src/klt/vpMbKltTracker.cpp b/modules/tracker/mbt/src/klt/vpMbKltTracker.cpp index 944a4c1ee8..eb5b1b9a2c 100644 --- a/modules/tracker/mbt/src/klt/vpMbKltTracker.cpp +++ b/modules/tracker/mbt/src/klt/vpMbKltTracker.cpp @@ -107,9 +107,9 @@ vpMatrix homography2collineation(const vpMatrix &H, const vpCameraParameters &ca vpMbKltTracker::vpMbKltTracker() : - cur(), c0Mo(), firstInitialisation(true), maskBorder(5), threshold_outlier(0.5), percentGood(0.6), ctTc0(), tracker(), - kltPolygons(), kltCylinders(), circles_disp(), m_nbInfos(0), m_nbFaceUsed(0), m_L_klt(), m_error_klt(), m_w_klt(), - m_weightedError_klt(), m_robust_klt(), m_featuresToBeDisplayedKlt() + cur(), c0Mo(), firstInitialisation(true), maskBorder(5), threshold_outlier(0.5), percentGood(0.6), ctTc0(), tracker(), + kltPolygons(), kltCylinders(), circles_disp(), m_nbInfos(0), m_nbFaceUsed(0), m_L_klt(), m_error_klt(), m_w_klt(), + m_weightedError_klt(), m_robust_klt(), m_featuresToBeDisplayedKlt() { tracker.setTrackerId(1); tracker.setUseHarris(1); @@ -218,7 +218,8 @@ void vpMbKltTracker::reinit(const vpImage &I) vpMbtDistanceKltCylinder *kltPolyCylinder; if (useScanLine) { vpImageConvert::convert(faces.getMbScanLineRenderer().getMask(), mask); - } else { + } + else { unsigned char val = 255 /* - i*15*/; for (std::list::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) { kltpoly = *it; @@ -443,16 +444,18 @@ void vpMbKltTracker::setPose(const vpImage *const I, const vpImag if (!kltCylinders.empty()) { std::cout << "WARNING: Cannot set pose when model contains cylinder(s). " - "This feature is not implemented yet." - << std::endl; + "This feature is not implemented yet." + << std::endl; std::cout << "Tracker will be reinitialized with the given pose." 
<< std::endl; m_cMo = cdMo; if (I) { init(*I); - } else { + } + else { init(m_I); } - } else { + } + else { vpMbtDistanceKltPoints *kltpoly; std::vector init_pts; @@ -543,7 +546,8 @@ void vpMbKltTracker::setPose(const vpImage *const I, const vpImag if (I) { vpImageConvert::convert(*I, cur); - } else { + } + else { vpImageConvert::convert(m_I, cur); } @@ -553,22 +557,26 @@ void vpMbKltTracker::setPose(const vpImage *const I, const vpImag if (!useOgre) { if (I) { faces.setVisible(I->getWidth(), I->getHeight(), m_cam, cdMo, angleAppears, angleDisappears, reInitialisation); - } else { + } + else { faces.setVisible(m_I.getWidth(), m_I.getHeight(), m_cam, cdMo, angleAppears, angleDisappears, reInitialisation); } - } else { + } + else { #ifdef VISP_HAVE_OGRE if (I) { faces.setVisibleOgre(I->getWidth(), I->getHeight(), m_cam, cdMo, angleAppears, angleDisappears, reInitialisation); - } else { + } + else { faces.setVisibleOgre(m_I.getWidth(), m_I.getHeight(), m_cam, cdMo, angleAppears, angleDisappears, reInitialisation); } #else if (I) { faces.setVisible(I->getWidth(), I->getHeight(), m_cam, cdMo, angleAppears, angleDisappears, reInitialisation); - } else { + } + else { faces.setVisible(m_I.getWidth(), m_I.getHeight(), m_cam, cdMo, angleAppears, angleDisappears, reInitialisation); } #endif @@ -748,7 +756,8 @@ bool vpMbKltTracker::postTracking(const vpImage &I, vpColVector & // std::cout << "Too many point disappear : " << initialNumber << "/" // << currentNumber << std::endl; reInitialisation = true; - } else { + } + else { if (!useOgre) faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation); else { @@ -873,7 +882,8 @@ void vpMbKltTracker::computeVVSInteractionMatrixAndResidu() try { kltpoly->computeHomography(ctTc0, H); kltpoly->computeInteractionMatrixAndResidu(subR, subL); - } catch (...) { + } + catch (...) { throw vpTrackingException(vpTrackingException::fatalError, "Cannot compute interaction matrix"); } @@ -891,7 +901,8 @@ void vpMbKltTracker::computeVVSInteractionMatrixAndResidu() try { kltPolyCylinder->computeInteractionMatrixAndResidu(ctTc0, subR, subL); - } catch (...) { + } + catch (...) { throw vpTrackingException(vpTrackingException::fatalError, "Cannot compute interaction matrix"); } @@ -992,7 +1003,8 @@ void vpMbKltTracker::track(const vpImage &I_color) */ void vpMbKltTracker::loadConfigFile(const std::string &configFile, bool verbose) { - // Load projection error config +#if defined(VISP_HAVE_PUGIXML) +// Load projection error config vpMbTracker::loadConfigFile(configFile, verbose); vpMbtXmlGenericParser xmlp(vpMbtXmlGenericParser::KLT_PARSER); @@ -1013,7 +1025,8 @@ void vpMbKltTracker::loadConfigFile(const std::string &configFile, bool verbose) std::cout << " *********** Parsing XML for MBT KLT Tracker ************ " << std::endl; } xmlp.parse(configFile.c_str()); - } catch (...) { + } + catch (...) { vpERROR_TRACE("Can't open XML file \"%s\"\n ", configFile.c_str()); throw vpException(vpException::ioError, "problem to parse configuration file."); } @@ -1056,6 +1069,11 @@ void vpMbKltTracker::loadConfigFile(const std::string &configFile, bool verbose) setMinLineLengthThresh(minLineLengthThresholdGeneral); setMinPolygonAreaThresh(minPolygonAreaThresholdGeneral); } +#else + (void)configFile; + (void)verbose; + throw(vpException(vpException::ioError, "vpMbKltTracker::loadConfigFile() needs pugixml built-in 3rdparty")); +#endif } /*! 
@@ -1074,14 +1092,15 @@ void vpMbKltTracker::display(const vpImage &I, const vpHomogeneou bool displayFullModel) { std::vector > models = - vpMbKltTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); + vpMbKltTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); for (size_t i = 0; i < models.size(); i++) { if (vpMath::equal(models[i][0], 0)) { vpImagePoint ip1(models[i][1], models[i][2]); vpImagePoint ip2(models[i][3], models[i][4]); vpDisplay::displayLine(I, ip1, ip2, col, thickness); - } else if (vpMath::equal(models[i][0], 1)) { + } + else if (vpMath::equal(models[i][0], 1)) { vpImagePoint center(models[i][1], models[i][2]); double n20 = models[i][3]; double n11 = models[i][4]; @@ -1126,14 +1145,15 @@ void vpMbKltTracker::display(const vpImage &I, const vpHomogeneousMatrix const vpColor &col, unsigned int thickness, bool displayFullModel) { std::vector > models = - vpMbKltTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); + vpMbKltTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel); for (size_t i = 0; i < models.size(); i++) { if (vpMath::equal(models[i][0], 0)) { vpImagePoint ip1(models[i][1], models[i][2]); vpImagePoint ip2(models[i][3], models[i][4]); vpDisplay::displayLine(I, ip1, ip2, col, thickness); - } else if (vpMath::equal(models[i][0], 1)) { + } + else if (vpMath::equal(models[i][0], 1)) { vpImagePoint center(models[i][1], models[i][2]); double n20 = models[i][3]; double n11 = models[i][4]; @@ -1443,5 +1463,5 @@ void vpMbKltTracker::setUseKltTracking(const std::string &name, const bool &useK #elif !defined(VISP_BUILD_SHARED_LIBS) // Work around to avoid warning: libvisp_mbt.a(vpMbKltTracker.cpp.o) has no // symbols -void dummy_vpMbKltTracker(){}; +void dummy_vpMbKltTracker() { }; #endif // VISP_HAVE_OPENCV diff --git a/modules/tracker/mbt/src/vpMbGenericTracker.cpp b/modules/tracker/mbt/src/vpMbGenericTracker.cpp index 830ff1577b..766c3f92d7 100644 --- a/modules/tracker/mbt/src/vpMbGenericTracker.cpp +++ b/modules/tracker/mbt/src/vpMbGenericTracker.cpp @@ -6514,6 +6514,7 @@ void vpMbGenericTracker::TrackerWrapper::initMbtTracking(const vpImage &I, void vpMbTracker::loadConfigFile(const std::string &configFile, bool verbose) { +#if defined(VISP_HAVE_PUGIXML) vpMbtXmlGenericParser xmlp(vpMbtXmlGenericParser::PROJECTION_ERROR_PARSER); xmlp.setVerbose(verbose); xmlp.setProjectionErrorMe(m_projectionErrorMe); @@ -3799,6 +3800,11 @@ void vpMbTracker::loadConfigFile(const std::string &configFile, bool verbose) setProjectionErrorMovingEdge(meParser); setProjectionErrorKernelSize(xmlp.getProjectionErrorKernelSize()); +#else + (void)configFile; + (void)verbose; + throw(vpException(vpException::ioError, "vpMbTracker::loadConfigFile() needs pugixml built-in 3rdparty")); +#endif } /*! 
diff --git a/modules/tracker/mbt/src/vpMbtXmlGenericParser.cpp b/modules/tracker/mbt/src/vpMbtXmlGenericParser.cpp index c3707d7735..faf8cee7a3 100644 --- a/modules/tracker/mbt/src/vpMbtXmlGenericParser.cpp +++ b/modules/tracker/mbt/src/vpMbtXmlGenericParser.cpp @@ -40,6 +40,7 @@ #include +#if defined(VISP_HAVE_PUGIXML) #include #ifndef DOXYGEN_SHOULD_SKIP_THIS @@ -1915,3 +1916,9 @@ void vpMbtXmlGenericParser::setProjectionErrorKernelSize(const unsigned int &siz \param verbose : verbose flag */ void vpMbtXmlGenericParser::setVerbose(bool verbose) { m_impl->setVerbose(verbose); } + +#elif !defined(VISP_BUILD_SHARED_LIBS) +// Work around to avoid warning: libvisp_core.a(vpMbtXmlGenericParser.cpp.o) has no symbols +void dummy_vpMbtXmlGenericParser() { }; + +#endif diff --git a/modules/tracker/mbt/test/generic-with-dataset/perfGenericTracker.cpp b/modules/tracker/mbt/test/generic-with-dataset/perfGenericTracker.cpp index da5bb48cb8..94c13ca1a5 100644 --- a/modules/tracker/mbt/test/generic-with-dataset/perfGenericTracker.cpp +++ b/modules/tracker/mbt/test/generic-with-dataset/perfGenericTracker.cpp @@ -129,12 +129,65 @@ TEST_CASE("Benchmark generic tracker", "[benchmark]") const std::string input_directory = vpIoTools::createFilePath(vpIoTools::getViSPImagesDataPath(), "mbt-depth/Castle-simu"); + + const bool verbose = false; +#if defined(VISP_HAVE_PUGIXML) const std::string configFileCam1 = input_directory + std::string("/Config/chateau.xml"); const std::string configFileCam2 = input_directory + std::string("/Config/chateau_depth.xml"); REQUIRE(vpIoTools::checkFilename(configFileCam1)); REQUIRE(vpIoTools::checkFilename(configFileCam2)); - const bool verbose = false; tracker.loadConfigFile(configFileCam1, configFileCam2, verbose); +#else + { + vpCameraParameters cam_color, cam_depth; + cam_color.initPersProjWithoutDistortion(700.0, 700.0, 320.0, 240.0); + cam_depth.initPersProjWithoutDistortion(700.0, 700.0, 320.0, 240.0); + tracker.setCameraParameters(cam_color, cam_depth); + } + + // Edge + vpMe me; + me.setMaskSize(5); + me.setMaskNumber(180); + me.setRange(8); + me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD); + me.setThreshold(5); + me.setMu1(0.5); + me.setMu2(0.5); + me.setSampleStep(5); + tracker.setMovingEdge(me); + + // Klt +#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO) + vpKltOpencv klt; + tracker.setKltMaskBorder(5); + klt.setMaxFeatures(10000); + klt.setWindowSize(5); + klt.setQuality(0.01); + klt.setMinDistance(5); + klt.setHarrisFreeParameter(0.02); + klt.setBlockSize(3); + klt.setPyramidLevels(3); + + tracker.setKltOpencv(klt); +#endif + + // Depth + tracker.setDepthNormalFeatureEstimationMethod(vpMbtFaceDepthNormal::ROBUST_FEATURE_ESTIMATION); + tracker.setDepthNormalPclPlaneEstimationMethod(2); + tracker.setDepthNormalPclPlaneEstimationRansacMaxIter(200); + tracker.setDepthNormalPclPlaneEstimationRansacThreshold(0.001); + tracker.setDepthNormalSamplingStep(2, 2); + + tracker.setDepthDenseSamplingStep(4, 4); + + tracker.setAngleAppear(vpMath::rad(85.0)); + tracker.setAngleDisappear(vpMath::rad(89.0)); + tracker.setNearClippingDistance(0.01); + tracker.setFarClippingDistance(2.0); + tracker.setClipping(tracker.getClipping() | vpMbtPolygon::FOV_CLIPPING); +#endif + REQUIRE(vpIoTools::checkFilename(input_directory + "/Models/chateau.cao")); tracker.loadModel(input_directory + "/Models/chateau.cao", input_directory + "/Models/chateau.cao", verbose); @@ -226,7 +279,58 @@ TEST_CASE("Benchmark 
generic tracker", "[benchmark]") tracker.setTrackerType(mapOfTrackerTypes[idx]); const bool verbose = false; +#if defined(VISP_HAVE_PUGIXML) tracker.loadConfigFile(configFileCam1, configFileCam2, verbose); +#else + { + vpCameraParameters cam_color, cam_depth; + cam_color.initPersProjWithoutDistortion(700.0, 700.0, 320.0, 240.0); + cam_depth.initPersProjWithoutDistortion(700.0, 700.0, 320.0, 240.0); + tracker.setCameraParameters(cam_color, cam_depth); + } + + // Edge + vpMe me; + me.setMaskSize(5); + me.setMaskNumber(180); + me.setRange(8); + me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD); + me.setThreshold(5); + me.setMu1(0.5); + me.setMu2(0.5); + me.setSampleStep(5); + tracker.setMovingEdge(me); + + // Klt +#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO) + vpKltOpencv klt; + tracker.setKltMaskBorder(5); + klt.setMaxFeatures(10000); + klt.setWindowSize(5); + klt.setQuality(0.01); + klt.setMinDistance(5); + klt.setHarrisFreeParameter(0.02); + klt.setBlockSize(3); + klt.setPyramidLevels(3); + + tracker.setKltOpencv(klt); +#endif + + // Depth + tracker.setDepthNormalFeatureEstimationMethod(vpMbtFaceDepthNormal::ROBUST_FEATURE_ESTIMATION); + tracker.setDepthNormalPclPlaneEstimationMethod(2); + tracker.setDepthNormalPclPlaneEstimationRansacMaxIter(200); + tracker.setDepthNormalPclPlaneEstimationRansacThreshold(0.001); + tracker.setDepthNormalSamplingStep(2, 2); + + tracker.setDepthDenseSamplingStep(4, 4); + + tracker.setAngleAppear(vpMath::rad(85.0)); + tracker.setAngleDisappear(vpMath::rad(89.0)); + tracker.setNearClippingDistance(0.01); + tracker.setFarClippingDistance(2.0); + tracker.setClipping(tracker.getClipping() | vpMbtPolygon::FOV_CLIPPING); +#endif tracker.loadModel(input_directory + "/Models/chateau.cao", input_directory + "/Models/chateau.cao", verbose); tracker.loadModel(input_directory + "/Models/cube.cao", verbose, T); tracker.initFromPose(images.front(), cMo_truth_all.front()); diff --git a/modules/tracker/mbt/test/generic-with-dataset/testGenericTracker.cpp b/modules/tracker/mbt/test/generic-with-dataset/testGenericTracker.cpp index 03273fbce1..03087c034b 100644 --- a/modules/tracker/mbt/test/generic-with-dataset/testGenericTracker.cpp +++ b/modules/tracker/mbt/test/generic-with-dataset/testGenericTracker.cpp @@ -275,12 +275,14 @@ bool run(const std::string &input_directory, bool opt_click_allowed, bool opt_di tracker_type[0] = trackerType_image; tracker_type[1] = vpMbGenericTracker::DEPTH_DENSE_TRACKER; vpMbGenericTracker tracker(tracker_type); + +#if defined(VISP_HAVE_PUGIXML) std::string configFileCam1 = input_directory + std::string("/Config/chateau.xml"); std::string configFileCam2 = input_directory + std::string("/Config/chateau_depth.xml"); std::cout << "Load config file for camera 1: " << configFileCam1 << std::endl; std::cout << "Load config file for camera 2: " << configFileCam2 << std::endl; tracker.loadConfigFile(configFileCam1, configFileCam2); -#if 0 +#else // Corresponding parameters manually set to have an example code { vpCameraParameters cam_color, cam_depth; diff --git a/modules/tracker/mbt/test/generic-with-dataset/testGenericTrackerDepth.cpp b/modules/tracker/mbt/test/generic-with-dataset/testGenericTrackerDepth.cpp index 23af518084..234eeebc29 100644 --- a/modules/tracker/mbt/test/generic-with-dataset/testGenericTrackerDepth.cpp +++ b/modules/tracker/mbt/test/generic-with-dataset/testGenericTrackerDepth.cpp @@ -248,8 +248,10 @@ bool run(vpImage &I, const std::string 
&input_directory, bool opt_click_al std::vector tracker_type; tracker_type.push_back(vpMbGenericTracker::DEPTH_DENSE_TRACKER); vpMbGenericTracker tracker(tracker_type); + +#if defined(VISP_HAVE_PUGIXML) tracker.loadConfigFile(input_directory + "/Config/chateau_depth.xml"); -#if 0 +#else // Corresponding parameters manually set to have an example code { vpCameraParameters cam_depth; diff --git a/modules/tracker/mbt/test/generic-with-dataset/testGenericTrackerDeterminist.cpp b/modules/tracker/mbt/test/generic-with-dataset/testGenericTrackerDeterminist.cpp index a6f303aaf8..3e23e77981 100644 --- a/modules/tracker/mbt/test/generic-with-dataset/testGenericTrackerDeterminist.cpp +++ b/modules/tracker/mbt/test/generic-with-dataset/testGenericTrackerDeterminist.cpp @@ -89,8 +89,48 @@ void configureTracker(vpMbGenericTracker &tracker, vpCameraParameters &cam) const std::string env_ipath = vpIoTools::getViSPImagesDataPath(); const std::string configFile = vpIoTools::createFilePath(env_ipath, "mbt/cube.xml"); const std::string modelFile = vpIoTools::createFilePath(env_ipath, "mbt/cube_and_cylinder.cao"); +#if defined(VISP_HAVE_PUGIXML) const bool verbose = false; tracker.loadConfigFile(configFile, verbose); +#else + // Corresponding parameters manually set to have an example code + // By setting the parameters: + cam.initPersProjWithoutDistortion(547, 542, 338, 234); + + vpMe me; + me.setMaskSize(5); + me.setMaskNumber(180); + me.setRange(7); + me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD); + me.setThreshold(5); + me.setMu1(0.5); + me.setMu2(0.5); + me.setSampleStep(4); + + vpKltOpencv klt; + klt.setMaxFeatures(300); + klt.setWindowSize(5); + klt.setQuality(0.01); + klt.setMinDistance(5); + klt.setHarrisFreeParameter(0.01); + klt.setBlockSize(3); + klt.setPyramidLevels(3); + + tracker.setCameraParameters(cam); + tracker.setMovingEdge(me); + tracker.setKltOpencv(klt); + tracker.setKltMaskBorder(5); + tracker.setAngleAppear(vpMath::rad(65)); + tracker.setAngleDisappear(vpMath::rad(75)); + + // Specify the clipping to + tracker.setNearClippingDistance(0.01); + tracker.setFarClippingDistance(0.90); + tracker.setClipping(tracker.getClipping() | vpMbtPolygon::FOV_CLIPPING); + // tracker.setClipping(tracker.getClipping() | vpMbtPolygon::LEFT_CLIPPING | + // vpMbtPolygon::RIGHT_CLIPPING | vpMbtPolygon::UP_CLIPPING | + // vpMbtPolygon::DOWN_CLIPPING); // Equivalent to FOV_CLIPPING +#endif tracker.getCameraParameters(cam); tracker.loadModel(modelFile); tracker.setDisplayFeatures(true); diff --git a/modules/tracker/mbt/test/generic-with-dataset/testMbtXmlGenericParser.cpp b/modules/tracker/mbt/test/generic-with-dataset/testMbtXmlGenericParser.cpp index d541fbace8..c78650c1bd 100644 --- a/modules/tracker/mbt/test/generic-with-dataset/testMbtXmlGenericParser.cpp +++ b/modules/tracker/mbt/test/generic-with-dataset/testMbtXmlGenericParser.cpp @@ -44,7 +44,7 @@ int main() { -#if defined(VISP_HAVE_LAPACK) || defined(VISP_HAVE_EIGEN3) || defined(VISP_HAVE_OPENCV) +#if defined(VISP_HAVE_PUGIXML) && (defined(VISP_HAVE_LAPACK) || defined(VISP_HAVE_EIGEN3) || defined(VISP_HAVE_OPENCV)) std::string visp_images_dir = vpIoTools::getViSPImagesDataPath(); if (vpIoTools::checkDirectory(visp_images_dir + "/xml")) { double eps = std::numeric_limits::epsilon(); @@ -159,6 +159,8 @@ int main() } #elif !(defined(VISP_HAVE_LAPACK) || defined(VISP_HAVE_EIGEN3) || defined(VISP_HAVE_OPENCV)) std::cout << "Cannot run this example: install Lapack, Eigen3 or OpenCV" << std::endl; +#elif !(defined(VISP_HAVE_PUGIXML)) + std::cout 
<< "Cannot run this example: enable pugixml built-in" << std::endl; #endif std::cout << "Test succeed" << std::endl; diff --git a/modules/vision/include/visp3/vision/vpXmlConfigParserKeyPoint.h b/modules/vision/include/visp3/vision/vpXmlConfigParserKeyPoint.h index e039258dbb..eaf7e67cfb 100644 --- a/modules/vision/include/visp3/vision/vpXmlConfigParserKeyPoint.h +++ b/modules/vision/include/visp3/vision/vpXmlConfigParserKeyPoint.h @@ -44,6 +44,7 @@ #include +#if defined(VISP_HAVE_PUGIXML) #include /*! @@ -198,3 +199,4 @@ class VISP_EXPORT vpXmlConfigParserKeyPoint Impl *m_impl; }; #endif +#endif diff --git a/modules/vision/src/key-point/vpKeyPoint.cpp b/modules/vision/src/key-point/vpKeyPoint.cpp index 994b18ec01..83d87ea345 100644 --- a/modules/vision/src/key-point/vpKeyPoint.cpp +++ b/modules/vision/src/key-point/vpKeyPoint.cpp @@ -39,7 +39,9 @@ #if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_FEATURES2D) +#if defined(VISP_HAVE_PUGIXML) #include +#endif namespace { @@ -2414,6 +2416,7 @@ void vpKeyPoint::insertImageMatching(const vpImage &ICurrent, vpImage #include +#if defined(VISP_HAVE_PUGIXML) #include #include @@ -545,3 +546,9 @@ bool vpXmlConfigParserKeyPoint::getUseRansacVVSPoseEstimation() const { return m_impl->getUseRansacVVSPoseEstimation(); } + +#elif !defined(VISP_BUILD_SHARED_LIBS) +// Work around to avoid warning: libvisp_core.a(vpXmlConfigParserKeyPoint.cpp.o) has no symbols +void dummy_vpXmlConfigParserKeyPoint() { }; + +#endif diff --git a/modules/vision/test/keypoint-with-dataset/testKeyPoint-2.cpp b/modules/vision/test/keypoint-with-dataset/testKeyPoint-2.cpp index 8c0391e565..c9a67b3ee4 100644 --- a/modules/vision/test/keypoint-with-dataset/testKeyPoint-2.cpp +++ b/modules/vision/test/keypoint-with-dataset/testKeyPoint-2.cpp @@ -179,9 +179,10 @@ void run_test(const std::string &env_ipath, bool opt_click_allowed, bool opt_dis // Load config for tracker std::string tracker_config_file = vpIoTools::createFilePath(env_ipath, "mbt/cube.xml"); +#if defined(VISP_HAVE_PUGYXML) tracker.loadConfigFile(tracker_config_file); tracker.getCameraParameters(cam); -#if 0 +#else // Corresponding parameters manually set to have an example code vpMe me; me.setMaskSize(5); diff --git a/modules/vision/test/keypoint-with-dataset/testKeyPoint-4.cpp b/modules/vision/test/keypoint-with-dataset/testKeyPoint-4.cpp index 55282dc3f3..2dfd21d68f 100644 --- a/modules/vision/test/keypoint-with-dataset/testKeyPoint-4.cpp +++ b/modules/vision/test/keypoint-with-dataset/testKeyPoint-4.cpp @@ -180,9 +180,10 @@ void run_test(const std::string &env_ipath, bool opt_click_allowed, bool opt_dis // Load config for tracker std::string tracker_config_file = vpIoTools::createFilePath(env_ipath, "mbt/cube.xml"); +#if defined(VISP_HAVE_PUGYXML) tracker.loadConfigFile(tracker_config_file); tracker.getCameraParameters(cam); -#if 0 +#else // Corresponding parameters manually set to have an example code vpMe me; me.setMaskSize(5); diff --git a/modules/vision/test/keypoint-with-dataset/testKeyPoint-7.cpp b/modules/vision/test/keypoint-with-dataset/testKeyPoint-7.cpp index e1143220c8..253dd7ebad 100644 --- a/modules/vision/test/keypoint-with-dataset/testKeyPoint-7.cpp +++ b/modules/vision/test/keypoint-with-dataset/testKeyPoint-7.cpp @@ -149,43 +149,43 @@ bool compareKeyPoints(const std::vector &keypoints1, const std::ve for (size_t cpt = 0; cpt < keypoints1.size(); cpt++) { if (!vpMath::equal(keypoints1[cpt].angle, keypoints2[cpt].angle, std::numeric_limits::epsilon())) { std::cerr << 
std::fixed << std::setprecision(9) << "keypoints1[cpt].angle=" << keypoints1[cpt].angle - << " ; keypoints2[cpt].angle=" << keypoints2[cpt].angle << std::endl; + << " ; keypoints2[cpt].angle=" << keypoints2[cpt].angle << std::endl; return false; } if (keypoints1[cpt].class_id != keypoints2[cpt].class_id) { std::cerr << "keypoints1[cpt].class_id=" << keypoints1[cpt].class_id - << " ; keypoints2[cpt].class_id=" << keypoints2[cpt].class_id << std::endl; + << " ; keypoints2[cpt].class_id=" << keypoints2[cpt].class_id << std::endl; return false; } if (keypoints1[cpt].octave != keypoints2[cpt].octave) { std::cerr << "keypoints1[cpt].octave=" << keypoints1[cpt].octave - << " ; keypoints2[cpt].octave=" << keypoints2[cpt].octave << std::endl; + << " ; keypoints2[cpt].octave=" << keypoints2[cpt].octave << std::endl; return false; } if (!vpMath::equal(keypoints1[cpt].pt.x, keypoints2[cpt].pt.x, std::numeric_limits::epsilon())) { std::cerr << std::fixed << std::setprecision(9) << "keypoints1[cpt].pt.x=" << keypoints1[cpt].pt.x - << " ; keypoints2[cpt].pt.x=" << keypoints2[cpt].pt.x << std::endl; + << " ; keypoints2[cpt].pt.x=" << keypoints2[cpt].pt.x << std::endl; return false; } if (!vpMath::equal(keypoints1[cpt].pt.y, keypoints2[cpt].pt.y, std::numeric_limits::epsilon())) { std::cerr << std::fixed << std::setprecision(9) << "keypoints1[cpt].pt.y=" << keypoints1[cpt].pt.y - << " ; keypoints2[cpt].pt.y=" << keypoints2[cpt].pt.y << std::endl; + << " ; keypoints2[cpt].pt.y=" << keypoints2[cpt].pt.y << std::endl; return false; } if (!vpMath::equal(keypoints1[cpt].response, keypoints2[cpt].response, std::numeric_limits::epsilon())) { std::cerr << std::fixed << std::setprecision(9) << "keypoints1[cpt].response=" << keypoints1[cpt].response - << " ; keypoints2[cpt].response=" << keypoints2[cpt].response << std::endl; + << " ; keypoints2[cpt].response=" << keypoints2[cpt].response << std::endl; return false; } if (!vpMath::equal(keypoints1[cpt].size, keypoints2[cpt].size, std::numeric_limits::epsilon())) { std::cerr << std::fixed << std::setprecision(9) << "keypoints1[cpt].size=" << keypoints1[cpt].size - << " ; keypoints2[cpt].size=" << keypoints2[cpt].size << std::endl; + << " ; keypoints2[cpt].size=" << keypoints2[cpt].size << std::endl; return false; } } @@ -214,7 +214,7 @@ bool compareDescriptors(const cv::Mat &descriptors1, const cv::Mat &descriptors2 case CV_8U: if (descriptors1.at(i, j) != descriptors2.at(i, j)) { std::cerr << "descriptors1.at(i,j)=" << descriptors1.at(i, j) - << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; + << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; return false; } break; @@ -222,7 +222,7 @@ bool compareDescriptors(const cv::Mat &descriptors1, const cv::Mat &descriptors2 case CV_8S: if (descriptors1.at(i, j) != descriptors2.at(i, j)) { std::cerr << "descriptors1.at(i,j)=" << descriptors1.at(i, j) - << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; + << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; return false; } break; @@ -230,7 +230,7 @@ bool compareDescriptors(const cv::Mat &descriptors1, const cv::Mat &descriptors2 case CV_16U: if (descriptors1.at(i, j) != descriptors2.at(i, j)) { std::cerr << "descriptors1.at(i,j)=" << descriptors1.at(i, j) - << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; + << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; return false; } break; @@ -238,7 +238,7 @@ bool compareDescriptors(const cv::Mat &descriptors1, const cv::Mat 
&descriptors2 case CV_16S: if (descriptors1.at(i, j) != descriptors2.at(i, j)) { std::cerr << "descriptors1.at(i,j)=" << descriptors1.at(i, j) - << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; + << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; return false; } break; @@ -246,7 +246,7 @@ bool compareDescriptors(const cv::Mat &descriptors1, const cv::Mat &descriptors2 case CV_32S: if (descriptors1.at(i, j) != descriptors2.at(i, j)) { std::cerr << "descriptors1.at(i,j)=" << descriptors1.at(i, j) - << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; + << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; return false; } break; @@ -255,8 +255,8 @@ bool compareDescriptors(const cv::Mat &descriptors1, const cv::Mat &descriptors2 if (!vpMath::equal(descriptors1.at(i, j), descriptors2.at(i, j), std::numeric_limits::epsilon())) { std::cerr << std::fixed << std::setprecision(9) - << "descriptors1.at(i,j)=" << descriptors1.at(i, j) - << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; + << "descriptors1.at(i,j)=" << descriptors1.at(i, j) + << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; return false; } break; @@ -265,8 +265,8 @@ bool compareDescriptors(const cv::Mat &descriptors1, const cv::Mat &descriptors2 if (!vpMath::equal(descriptors1.at(i, j), descriptors2.at(i, j), std::numeric_limits::epsilon())) { std::cerr << std::fixed << std::setprecision(17) - << "descriptors1.at(i,j)=" << descriptors1.at(i, j) - << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; + << "descriptors1.at(i,j)=" << descriptors1.at(i, j) + << " ; descriptors2.at(i,j)=" << descriptors2.at(i, j) << std::endl; return false; } break; @@ -325,7 +325,9 @@ template void run_test(const std::string &env_ipath, const std:: // Test if read is ok vpKeyPoint read_keypoint1; + read_keypoint1.loadLearningData(filename, true); + std::vector trainKeyPoints_read; read_keypoint1.getTrainKeyPoints(trainKeyPoints_read); cv::Mat trainDescriptors_read = read_keypoint1.getTrainDescriptors(); @@ -341,7 +343,7 @@ template void run_test(const std::string &env_ipath, const std:: "binary with train images saved !"); } - // Save in binary with no training images + // Save in binary without training images filename = vpIoTools::createFilePath(opath, "bin_without_img"); vpIoTools::makeDirectory(filename); filename = vpIoTools::createFilePath(filename, "test_save_in_bin_without_img.bin"); @@ -372,6 +374,7 @@ template void run_test(const std::string &env_ipath, const std:: "binary without train images !"); } +#if defined(VISP_HAVE_PUGIXML) // Save in xml with training images filename = vpIoTools::createFilePath(opath, "xml_with_img"); vpIoTools::makeDirectory(filename); @@ -433,7 +436,7 @@ template void run_test(const std::string &env_ipath, const std:: "learning file saved in " "xml without train images saved !"); } - +#endif std::cout << "Saving / loading learning files with binary descriptor are ok !"
<< std::endl; } @@ -524,6 +527,7 @@ template void run_test(const std::string &env_ipath, const std:: "binary without train images saved !"); } +#if defined(VISP_HAVE_PUGIXML) // Save in xml with training images filename = vpIoTools::createFilePath(opath, "xml_with_img"); vpIoTools::makeDirectory(filename); @@ -585,10 +589,8 @@ template void run_test(const std::string &env_ipath, const std:: "learning file saved in " "xml without train images saved !"); } - - std::cout << "Saving / loading learning files with floating point " - "descriptor are ok !" - << std::endl; +#endif + std::cout << "Saving / loading learning files with floating point descriptor are ok !" << std::endl; // Test vpKeyPoint::reset() vpKeyPoint keypoint_reset; @@ -623,8 +625,8 @@ template void run_test(const std::string &env_ipath, const std:: } std::cout << "vpKeyPoint::reset() is ok with trainKeyPoints and " - "trainDescriptors !" - << std::endl; + "trainDescriptors !" + << std::endl; #endif // OpenCV != 4.5.4 on macOS } #endif @@ -688,7 +690,8 @@ int main(int argc, const char **argv) run_test(env_ipath, opath, I); } - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cerr << e.what() << std::endl; return EXIT_FAILURE; } diff --git a/modules/vision/test/keypoint-with-dataset/testXmlConfigParserKeyPoint.cpp b/modules/vision/test/keypoint-with-dataset/testXmlConfigParserKeyPoint.cpp index 9f5116eae4..549f5a4c7a 100644 --- a/modules/vision/test/keypoint-with-dataset/testXmlConfigParserKeyPoint.cpp +++ b/modules/vision/test/keypoint-with-dataset/testXmlConfigParserKeyPoint.cpp @@ -42,6 +42,7 @@ int main() { +#if defined(VISP_HAVE_PUGIXML) std::string visp_images_dir = vpIoTools::getViSPImagesDataPath(); if (vpIoTools::checkDirectory(visp_images_dir + "/xml")) { double eps = std::numeric_limits::epsilon(); @@ -76,6 +77,6 @@ int main() } } } - +#endif return EXIT_SUCCESS; } diff --git a/tutorial/computer-vision/tutorial-pose-from-planar-object.cpp b/tutorial/computer-vision/tutorial-pose-from-planar-object.cpp index 5dc9ef13b7..9606151915 100644 --- a/tutorial/computer-vision/tutorial-pose-from-planar-object.cpp +++ b/tutorial/computer-vision/tutorial-pose-from-planar-object.cpp @@ -21,7 +21,7 @@ #include // Check if std:c++17 or higher -#if ((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) && defined(VISP_HAVE_DISPLAY) +#if ((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) && defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_PUGIXML) // Local helper namespace @@ -340,7 +340,7 @@ int main(int, char *argv[]) { #if ((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) -#if defined(VISP_HAVE_DISPLAY) +#if defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_PUGIXML) // Get prior data //! [Prior_Data] @@ -459,7 +459,7 @@ int main(int, char *argv[]) #else (void)argv; - std::cout << "There is no display available to run this tutorial." << std::endl; + std::cout << "There is no display or pugixml available to run this tutorial."
<< std::endl; #endif // defined(VISP_HAVE_DISPLAY) #else (void)argv; diff --git a/tutorial/computer-vision/tutorial-pose-from-points-live.cpp b/tutorial/computer-vision/tutorial-pose-from-points-live.cpp index a28d490b74..b6c99b2c54 100644 --- a/tutorial/computer-vision/tutorial-pose-from-points-live.cpp +++ b/tutorial/computer-vision/tutorial-pose-from-points-live.cpp @@ -30,7 +30,7 @@ int main(int argc, char **argv) { -#if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV)) && \ +#if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV)) && defined(VISP_HAVE_PUGIXML) && \ (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \ defined(HAVE_OPENCV_VIDEOIO) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)) try { diff --git a/tutorial/detection/object/tutorial-detection-object-mbt-deprecated.cpp b/tutorial/detection/object/tutorial-detection-object-mbt-deprecated.cpp index 7d9e4e4b99..391d81ba60 100644 --- a/tutorial/detection/object/tutorial-detection-object-mbt-deprecated.cpp +++ b/tutorial/detection/object/tutorial-detection-object-mbt-deprecated.cpp @@ -58,11 +58,13 @@ int main(int argc, char **argv) vpMbEdgeTracker tracker; bool usexml = false; +#if defined(VISP_HAVE_PUGIXML) if (vpIoTools::checkFilename(objectname + ".xml")) { tracker.loadConfigFile(objectname + ".xml"); tracker.getCameraParameters(cam); usexml = true; } +#endif if (!usexml) { vpMe me; me.setMaskSize(5); diff --git a/tutorial/detection/object/tutorial-detection-object-mbt.cpp b/tutorial/detection/object/tutorial-detection-object-mbt.cpp index 014ff2b484..d9d0ac470e 100644 --- a/tutorial/detection/object/tutorial-detection-object-mbt.cpp +++ b/tutorial/detection/object/tutorial-detection-object-mbt.cpp @@ -58,11 +58,13 @@ int main(int argc, char **argv) vpMbGenericTracker tracker(vpMbGenericTracker::EDGE_TRACKER); bool usexml = false; +#if defined(VISP_HAVE_PUGIXML) if (vpIoTools::checkFilename(objectname + ".xml")) { tracker.loadConfigFile(objectname + ".xml"); tracker.getCameraParameters(cam); usexml = true; } +#endif if (!usexml) { vpMe me; me.setMaskSize(5); diff --git a/tutorial/detection/object/tutorial-detection-object-mbt2-deprecated.cpp b/tutorial/detection/object/tutorial-detection-object-mbt2-deprecated.cpp index 8e55dddd85..e612206bb2 100644 --- a/tutorial/detection/object/tutorial-detection-object-mbt2-deprecated.cpp +++ b/tutorial/detection/object/tutorial-detection-object-mbt2-deprecated.cpp @@ -77,11 +77,13 @@ int main(int argc, char **argv) vpMbEdgeTracker tracker; bool usexml = false; +#if defined(VISP_HAVE_PUGIXML) if (vpIoTools::checkFilename(objectname + ".xml")) { tracker.loadConfigFile(objectname + ".xml"); tracker.getCameraParameters(cam); usexml = true; } +#endif if (!usexml) { vpMe me; me.setMaskSize(5); @@ -137,8 +139,8 @@ int main(int argc, char **argv) /* * Start the part of the code dedicated to object learning from 3 images */ - std::string imageName [] = { "cube0001.png", "cube0150.png", "cube0200.png" }; - vpHomogeneousMatrix initPoseTab [] = { + std::string imageName[] = { "cube0001.png", "cube0150.png", "cube0200.png" }; + vpHomogeneousMatrix initPoseTab[] = { vpHomogeneousMatrix(0.02143385294, 0.1098083886, 0.5127439561, 2.087159614, 1.141775176, -0.4701291124), vpHomogeneousMatrix(0.02651282185, -0.03713587374, 0.6873765919, 2.314744454, 0.3492296488, -0.1226054828), vpHomogeneousMatrix(0.02965448956, -0.07283091786, 0.7253526051, 2.300529617, -0.4286674806, 0.1788761025) }; diff 
--git a/tutorial/detection/object/tutorial-detection-object-mbt2.cpp b/tutorial/detection/object/tutorial-detection-object-mbt2.cpp index 5b825e6f59..bbe9259b55 100644 --- a/tutorial/detection/object/tutorial-detection-object-mbt2.cpp +++ b/tutorial/detection/object/tutorial-detection-object-mbt2.cpp @@ -77,11 +77,13 @@ int main(int argc, char **argv) vpMbGenericTracker tracker(vpMbGenericTracker::EDGE_TRACKER); bool usexml = false; +#if defined(VISP_HAVE_PUGIXML) if (vpIoTools::checkFilename(objectname + ".xml")) { tracker.loadConfigFile(objectname + ".xml"); tracker.getCameraParameters(cam); usexml = true; } +#endif if (!usexml) { vpMe me; me.setMaskSize(5); @@ -137,8 +139,8 @@ int main(int argc, char **argv) /* * Start the part of the code dedicated to object learning from 3 images */ - std::string imageName [] = { "cube0001.png", "cube0150.png", "cube0200.png" }; - vpHomogeneousMatrix initPoseTab [] = { + std::string imageName[] = { "cube0001.png", "cube0150.png", "cube0200.png" }; + vpHomogeneousMatrix initPoseTab[] = { vpHomogeneousMatrix(0.02143385294, 0.1098083886, 0.5127439561, 2.087159614, 1.141775176, -0.4701291124), vpHomogeneousMatrix(0.02651282185, -0.03713587374, 0.6873765919, 2.314744454, 0.3492296488, -0.1226054828), vpHomogeneousMatrix(0.02965448956, -0.07283091786, 0.7253526051, 2.300529617, -0.4286674806, 0.1788761025) }; diff --git a/tutorial/detection/tag/tutorial-apriltag-detector-live-T265-realsense.cpp b/tutorial/detection/tag/tutorial-apriltag-detector-live-T265-realsense.cpp index c2eaa1cc83..a19648d3be 100644 --- a/tutorial/detection/tag/tutorial-apriltag-detector-live-T265-realsense.cpp +++ b/tutorial/detection/tag/tutorial-apriltag-detector-live-T265-realsense.cpp @@ -10,7 +10,6 @@ #include #include #include -#include #include #include #include @@ -43,36 +42,46 @@ int main(int argc, const char **argv) for (int i = 1; i < argc; i++) { if (std::string(argv[i]) == "--pose_method" && i + 1 < argc) { poseEstimationMethod = (vpDetectorAprilTag::vpPoseEstimationMethod)atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) { tagSize = atof(argv[i + 1]); - } else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) { quad_decimate = (float)atof(argv[i + 1]); - } else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) { nThreads = atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--display_tag") { + } + else if (std::string(argv[i]) == "--display_tag") { display_tag = true; - } else if (std::string(argv[i]) == "--display_off") { + } + else if (std::string(argv[i]) == "--display_off") { display_off = true; - } else if (std::string(argv[i]) == "--color" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--color" && i + 1 < argc) { color_id = atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) { thickness = (unsigned int)atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) { tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--z_aligned") { + } + else if (std::string(argv[i]) == "--z_aligned") { align_frame = true; - } else if 
(std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { + } + else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { std::cout << "Usage: " << argv[0] - << " [--tag_size (default: 0.053)]" - " [--quad_decimate (default: 1)]" - " [--nthreads (default: 1)]" - " [--pose_method (0: HOMOGRAPHY, 1: HOMOGRAPHY_VIRTUAL_VS, " - " 2: DEMENTHON_VIRTUAL_VS, 3: LAGRANGE_VIRTUAL_VS, " - " 4: BEST_RESIDUAL_VIRTUAL_VS, 5: HOMOGRAPHY_ORTHOGONAL_ITERATION) (default: 0)]" - " [--tag_family (0: TAG_36h11, 1: TAG_36h10 (DEPRECATED), 2: TAG_36ARTOOLKIT (DEPRECATED)," - " 3: TAG_25h9, 4: TAG_25h7 (DEPRECATED), 5: TAG_16h5, 6: TAG_CIRCLE21h7, 7: TAG_CIRCLE49h12," - " 8: TAG_CUSTOM48h12, 9: TAG_STANDARD41h12, 10: TAG_STANDARD52h13) (default: 0)]" - " [--display_tag] [--z_aligned]"; + << " [--tag_size (default: 0.053)]" + " [--quad_decimate (default: 1)]" + " [--nthreads (default: 1)]" + " [--pose_method (0: HOMOGRAPHY, 1: HOMOGRAPHY_VIRTUAL_VS, " + " 2: DEMENTHON_VIRTUAL_VS, 3: LAGRANGE_VIRTUAL_VS, " + " 4: BEST_RESIDUAL_VIRTUAL_VS, 5: HOMOGRAPHY_ORTHOGONAL_ITERATION) (default: 0)]" + " [--tag_family (0: TAG_36h11, 1: TAG_36h10 (DEPRECATED), 2: TAG_36ARTOOLKIT (DEPRECATED)," + " 3: TAG_25h9, 4: TAG_25h7 (DEPRECATED), 5: TAG_16h5, 6: TAG_CIRCLE21h7, 7: TAG_CIRCLE49h12," + " 8: TAG_CUSTOM48h12, 9: TAG_STANDARD41h12, 10: TAG_STANDARD52h13) (default: 0)]" + " [--display_tag] [--z_aligned]"; #if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV)) std::cout << " [--display_off] [--color ] [--thickness ]"; #endif @@ -194,15 +203,16 @@ int main(int argc, const char **argv) std::cout << "Benchmark loop processing time" << std::endl; std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms" - << " ; " << vpMath::getMedian(time_vec) << " ms" - << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl; + << " ; " << vpMath::getMedian(time_vec) << " ms" + << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl; if (!display_off) { delete display_left; delete display_undistort; } - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cerr << "Catch an exception: " << e.getMessage() << std::endl; } @@ -214,7 +224,7 @@ int main(int argc, const char **argv) std::cout << "Enable Apriltag support, configure and build ViSP to run this tutorial" << std::endl; #elif defined(VISP_HAVE_REALSENSE2) && !(RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0)) std::cout << "Realsense T265 device needs librealsense API > 2.31.0. ViSP is linked with librealsense API " - << RS2_API_VERSION_STR << ". You need to upgrade librealsense to use this example." << std::endl; + << RS2_API_VERSION_STR << ". You need to upgrade librealsense to use this example." << std::endl; #else std::cout << "Install librealsense 3rd party, configure and build ViSP again to use this example." << std::endl; #endif diff --git a/tutorial/detection/tag/tutorial-apriltag-detector-live-rgbd-realsense.cpp b/tutorial/detection/tag/tutorial-apriltag-detector-live-rgbd-realsense.cpp index d721296063..a5cd897381 100644 --- a/tutorial/detection/tag/tutorial-apriltag-detector-live-rgbd-realsense.cpp +++ b/tutorial/detection/tag/tutorial-apriltag-detector-live-rgbd-realsense.cpp @@ -7,7 +7,6 @@ #include //! 
[Include] #include -#include #include #include #include @@ -39,36 +38,46 @@ int main(int argc, const char **argv) for (int i = 1; i < argc; i++) { if (std::string(argv[i]) == "--pose_method" && i + 1 < argc) { poseEstimationMethod = (vpDetectorAprilTag::vpPoseEstimationMethod)atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) { tagSize = atof(argv[i + 1]); - } else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) { quad_decimate = (float)atof(argv[i + 1]); - } else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) { nThreads = atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--display_tag") { + } + else if (std::string(argv[i]) == "--display_tag") { display_tag = true; - } else if (std::string(argv[i]) == "--display_off") { + } + else if (std::string(argv[i]) == "--display_off") { display_off = true; - } else if (std::string(argv[i]) == "--color" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--color" && i + 1 < argc) { color_id = atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) { thickness = (unsigned int)atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) { tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--z_aligned") { + } + else if (std::string(argv[i]) == "--z_aligned") { align_frame = true; - } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { + } + else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { std::cout << "Usage: " << argv[0] - << " [--tag_size (default: 0.053)]" - " [--quad_decimate (default: 1)]" - " [--nthreads (default: 1)]" - " [--pose_method (0: HOMOGRAPHY, 1: HOMOGRAPHY_VIRTUAL_VS, " - " 2: DEMENTHON_VIRTUAL_VS, 3: LAGRANGE_VIRTUAL_VS, " - " 4: BEST_RESIDUAL_VIRTUAL_VS, 5: HOMOGRAPHY_ORTHOGONAL_ITERATION) (default: 0)]" - " [--tag_family (0: TAG_36h11, 1: TAG_36h10 (DEPRECATED), 2: TAG_36ARTOOLKIT (DEPRECATED)," - " 3: TAG_25h9, 4: TAG_25h7 (DEPRECATED), 5: TAG_16h5, 6: TAG_CIRCLE21h7, 7: TAG_CIRCLE49h12," - " 8: TAG_CUSTOM48h12, 9: TAG_STANDARD41h12, 10: TAG_STANDARD52h13) (default: 0)]" - " [--display_tag] [--z_aligned]"; + << " [--tag_size (default: 0.053)]" + " [--quad_decimate (default: 1)]" + " [--nthreads (default: 1)]" + " [--pose_method (0: HOMOGRAPHY, 1: HOMOGRAPHY_VIRTUAL_VS, " + " 2: DEMENTHON_VIRTUAL_VS, 3: LAGRANGE_VIRTUAL_VS, " + " 4: BEST_RESIDUAL_VIRTUAL_VS, 5: HOMOGRAPHY_ORTHOGONAL_ITERATION) (default: 0)]" + " [--tag_family (0: TAG_36h11, 1: TAG_36h10 (DEPRECATED), 2: TAG_36ARTOOLKIT (DEPRECATED)," + " 3: TAG_25h9, 4: TAG_25h7 (DEPRECATED), 5: TAG_16h5, 6: TAG_CIRCLE21h7, 7: TAG_CIRCLE49h12," + " 8: TAG_CUSTOM48h12, 9: TAG_STANDARD41h12, 10: TAG_STANDARD52h13) (default: 0)]" + " [--display_tag] [--z_aligned]"; #if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV)) std::cout << " [--display_off] [--color ] [--thickness ]"; #endif @@ -168,7 +177,8 @@ int main(int argc, const char **argv) if (I_depth_raw[i][j]) { float Z = I_depth_raw[i][j] * depth_scale; depthMap[i][j] = Z; - } else { + } + else { 
depthMap[i][j] = 0; } } @@ -199,9 +209,11 @@ int main(int argc, const char **argv) &confidence_index)) { if (confidence_index > 0.5) { vpDisplay::displayFrame(I_color2, cMo, cam, tagSize / 2, vpColor::none, 3); - } else if (confidence_index > 0.25) { + } + else if (confidence_index > 0.25) { vpDisplay::displayFrame(I_color2, cMo, cam, tagSize / 2, vpColor::orange, 3); - } else { + } + else { vpDisplay::displayFrame(I_color2, cMo, cam, tagSize / 2, vpColor::red, 3); } std::stringstream ss; @@ -231,8 +243,8 @@ int main(int argc, const char **argv) std::cout << "Benchmark loop processing time" << std::endl; std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms" - << " ; " << vpMath::getMedian(time_vec) << " ms" - << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl; + << " ; " << vpMath::getMedian(time_vec) << " ms" + << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl; if (!display_off) { delete d1; @@ -240,7 +252,8 @@ int main(int argc, const char **argv) delete d3; } - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cerr << "Catch an exception: " << e.getMessage() << std::endl; } diff --git a/tutorial/detection/tag/tutorial-apriltag-detector-live.cpp b/tutorial/detection/tag/tutorial-apriltag-detector-live.cpp index f855805155..b3e758e41e 100644 --- a/tutorial/detection/tag/tutorial-apriltag-detector-live.cpp +++ b/tutorial/detection/tag/tutorial-apriltag-detector-live.cpp @@ -31,8 +31,8 @@ int main(int argc, const char **argv) { //! [Macro defined] -#if defined(VISP_HAVE_APRILTAG) && \ - (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \ +#if defined(VISP_HAVE_APRILTAG) && \ + (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \ defined(HAVE_OPENCV_VIDEOIO) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)) //! [Macro defined] @@ -123,11 +123,13 @@ int main(int argc, const char **argv) try { vpCameraParameters cam; cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, 312.1889954, 243.4373779); +#if defined(VISP_HAVE_PUGIXML) vpXmlParserCamera parser; if (!intrinsic_file.empty() && !camera_name.empty()) parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion); +#endif - //! [Construct grabber] +//! 
[Construct grabber] #if defined(VISP_HAVE_V4L2) vpV4l2Grabber g; std::ostringstream device; diff --git a/tutorial/detection/tag/tutorial-apriltag-detector.cpp b/tutorial/detection/tag/tutorial-apriltag-detector.cpp index 447e35b6dc..87ba939cfa 100644 --- a/tutorial/detection/tag/tutorial-apriltag-detector.cpp +++ b/tutorial/detection/tag/tutorial-apriltag-detector.cpp @@ -30,52 +30,67 @@ int main(int argc, const char **argv) for (int i = 1; i < argc; i++) { if (std::string(argv[i]) == "--pose_method" && i + 1 < argc) { poseEstimationMethod = (vpDetectorAprilTag::vpPoseEstimationMethod)atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) { tagSize = atof(argv[i + 1]); - } else if (std::string(argv[i]) == "--input" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--input" && i + 1 < argc) { input_filename = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) { quad_decimate = (float)atof(argv[i + 1]); - } else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) { nThreads = atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) { intrinsic_file = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) { camera_name = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--display_tag") { + } + else if (std::string(argv[i]) == "--display_tag") { display_tag = true; - } else if (std::string(argv[i]) == "--color" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--color" && i + 1 < argc) { color_id = atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) { thickness = (unsigned int)atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) { tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--z_aligned") { + } + else if (std::string(argv[i]) == "--z_aligned") { z_aligned = true; - } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { + } + else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { std::cout << "Usage: " << argv[0] - << " [--input ] [--tag_size ]" - " [--quad_decimate ] [--nthreads ]" - " [--intrinsic ] [--camera_name ]" - " [--pose_method (0: HOMOGRAPHY, 1: HOMOGRAPHY_VIRTUAL_VS, " - " 2: DEMENTHON_VIRTUAL_VS, 3: LAGRANGE_VIRTUAL_VS, " - " 4: BEST_RESIDUAL_VIRTUAL_VS, 5: HOMOGRAPHY_ORTHOGONAL_ITERATION) (default: 0)]" - " [--tag_family (0: TAG_36h11, 1: TAG_36h10 (DEPRECATED), 2: TAG_36ARTOOLKIT (DEPRECATED)," - " 3: TAG_25h9, 4: TAG_25h7 (DEPRECATED), 5: TAG_16h5, 6: TAG_CIRCLE21h7, 7: TAG_CIRCLE49h12," - " 8: TAG_CUSTOM48h12, 9: TAG_STANDARD41h12, 10: TAG_STANDARD52h13) (default: 0)]" - " [--display_tag] [--color ]" - " [--thickness ] [--z_aligned]" - " [--help]" - << std::endl; + << " [--input ] [--tag_size ]" + " [--quad_decimate ] [--nthreads ]" + " [--intrinsic ] [--camera_name ]" + " [--pose_method (0: HOMOGRAPHY, 1: 
HOMOGRAPHY_VIRTUAL_VS, " + " 2: DEMENTHON_VIRTUAL_VS, 3: LAGRANGE_VIRTUAL_VS, " + " 4: BEST_RESIDUAL_VIRTUAL_VS, 5: HOMOGRAPHY_ORTHOGONAL_ITERATION) (default: 0)]" + " [--tag_family (0: TAG_36h11, 1: TAG_36h10 (DEPRECATED), 2: TAG_36ARTOOLKIT (DEPRECATED)," + " 3: TAG_25h9, 4: TAG_25h7 (DEPRECATED), 5: TAG_16h5, 6: TAG_CIRCLE21h7, 7: TAG_CIRCLE49h12," + " 8: TAG_CUSTOM48h12, 9: TAG_STANDARD41h12, 10: TAG_STANDARD52h13) (default: 0)]" + " [--display_tag] [--color ]" + " [--thickness ] [--z_aligned]" + " [--help]" + << std::endl; return EXIT_SUCCESS; } } vpCameraParameters cam; cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, 312.1889954, 243.4373779); +#if defined(VISP_HAVE_PUGIXML) vpXmlParserCamera parser; - if (!intrinsic_file.empty() && !camera_name.empty()) + if (!intrinsic_file.empty() && !camera_name.empty()) { parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion); + } +#endif std::cout << cam << std::endl; std::cout << "poseEstimationMethod: " << poseEstimationMethod << std::endl; @@ -187,7 +202,8 @@ int main(int argc, const char **argv) vpDisplay::displayText(I_color, 20, 20, "Click to quit.", vpColor::red); vpDisplay::flush(I_color); vpDisplay::getClick(I_color); - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cerr << "Catch an exception: " << e.getMessage() << std::endl; } diff --git a/tutorial/image/tutorial-undistort.cpp b/tutorial/image/tutorial-undistort.cpp index 5601624e89..d72b2d8932 100644 --- a/tutorial/image/tutorial-undistort.cpp +++ b/tutorial/image/tutorial-undistort.cpp @@ -14,17 +14,20 @@ int main(int argc, char **argv) for (int i = 1; i < argc; i++) { if (std::string(argv[i]) == "--image" && i + 1 < argc) { opt_input_image = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--camera-file" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--camera-file" && i + 1 < argc) { opt_camera_file = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) { opt_camera_name = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { + } + else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { std::cout << argv[0] << " [--image ]" - << " [--camera-file ] [--camera-name ] [--help] [-h]\n" - << std::endl; + << " [--camera-file ] [--camera-name ] [--help] [-h]\n" + << std::endl; std::cout << "Examples: " << std::endl - << argv[0] << std::endl - << argv[0] << " --image chessboard.jpg --camera-file camera.xml --camera-name Camera" << std::endl; + << argv[0] << std::endl + << argv[0] << " --image chessboard.jpg --camera-file camera.xml --camera-name Camera" << std::endl; return EXIT_SUCCESS; } } @@ -35,8 +38,10 @@ int main(int argc, char **argv) vpImageIo::read(I, opt_input_image); //! [Load image] - //! [Load camera parameters from xml] + vpCameraParameters cam; +#if defined(VISP_HAVE_PUGIXML) + //! [Load camera parameters from xml] vpXmlParserCamera p; vpCameraParameters::vpCameraParametersProjType projModel; projModel = vpCameraParameters::perspectiveProjWithDistortion; @@ -44,12 +49,12 @@ int main(int argc, char **argv) vpXmlParserCamera::SEQUENCE_OK) { std::cout << "Cannot found parameters for camera named \"Camera\"" << std::endl; } -//! [Load camera parameters from xml] -//! [Set camera parameters] -#if 0 + //! [Load camera parameters from xml] +#else + //! 
[Set camera parameters] cam.initPersProjWithDistortion(582.7, 580.6, 326.6, 215.0, -0.3372, 0.4021); -#endif //! [Set camera parameters] +#endif std::cout << cam << std::endl; @@ -62,7 +67,8 @@ int main(int argc, char **argv) std::cout << "Save undistorted image in: " << output_image << std::endl; vpImageIo::write(Iud, output_image); //! [Create image without distortion] - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cout << "Catch an exception: " << e << std::endl; return EXIT_FAILURE; } diff --git a/tutorial/robot/flir-ptu/tutorial-flir-ptu-ibvs.cpp b/tutorial/robot/flir-ptu/tutorial-flir-ptu-ibvs.cpp index 14ad56fd52..11222668e0 100644 --- a/tutorial/robot/flir-ptu/tutorial-flir-ptu-ibvs.cpp +++ b/tutorial/robot/flir-ptu/tutorial-flir-ptu-ibvs.cpp @@ -58,7 +58,6 @@ #include #include -#include #include #include #include @@ -91,55 +90,60 @@ int main(int argc, char **argv) for (int i = 1; i < argc; i++) { if ((std::string(argv[i]) == "--portname" || std::string(argv[i]) == "-p") && (i + 1 < argc)) { opt_portname = std::string(argv[i + 1]); - } else if ((std::string(argv[i]) == "--baudrate" || std::string(argv[i]) == "-b") && (i + 1 < argc)) { + } + else if ((std::string(argv[i]) == "--baudrate" || std::string(argv[i]) == "-b") && (i + 1 < argc)) { opt_baudrate = std::atoi(argv[i + 1]); - } else if ((std::string(argv[i]) == "--network" || std::string(argv[i]) == "-n")) { + } + else if ((std::string(argv[i]) == "--network" || std::string(argv[i]) == "-n")) { opt_network = true; - } else if (std::string(argv[i]) == "--extrinsic" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--extrinsic" && i + 1 < argc) { opt_extrinsic = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--constant-gain" || std::string(argv[i]) == "-g") { + } + else if (std::string(argv[i]) == "--constant-gain" || std::string(argv[i]) == "-g") { opt_constant_gain = std::stod(argv[i + 1]); ; - } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { + } + else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { std::cout << "SYNOPSIS" << std::endl - << " " << argv[0] << " [--portname ] [--baudrate ] [--network] " - << "[--extrinsic ] [--constant-gain] [--help] [-h]" << std::endl - << std::endl; + << " " << argv[0] << " [--portname ] [--baudrate ] [--network] " + << "[--extrinsic ] [--constant-gain] [--help] [-h]" << std::endl + << std::endl; std::cout << "DESCRIPTION" << std::endl - << " --portname, -p " << std::endl - << " Set serial or tcp port name." << std::endl - << std::endl - << " --baudrate, -b " << std::endl - << " Set serial communication baud rate. Default: " << opt_baudrate << "." << std::endl - << std::endl - << " --network, -n" << std::endl - << " Get PTU network information (Hostname, IP, Gateway) and exit. " << std::endl - << std::endl - << " --extrinsic " << std::endl - << " YAML file containing extrinsic camera parameters as a vpHomogeneousMatrix." << std::endl - << " It corresponds to the homogeneous transformation eMc, between end-effector" << std::endl - << " and camera frame." << std::endl - << std::endl - << " --constant-gain, -g" << std::endl - << " Constant gain value. Default value: " << opt_constant_gain << std::endl - << std::endl - << " --help, -h" << std::endl - << " Print this helper message. " << std::endl - << std::endl; + << " --portname, -p " << std::endl + << " Set serial or tcp port name." 
<< std::endl + << std::endl + << " --baudrate, -b " << std::endl + << " Set serial communication baud rate. Default: " << opt_baudrate << "." << std::endl + << std::endl + << " --network, -n" << std::endl + << " Get PTU network information (Hostname, IP, Gateway) and exit. " << std::endl + << std::endl + << " --extrinsic " << std::endl + << " YAML file containing extrinsic camera parameters as a vpHomogeneousMatrix." << std::endl + << " It corresponds to the homogeneous transformation eMc, between end-effector" << std::endl + << " and camera frame." << std::endl + << std::endl + << " --constant-gain, -g" << std::endl + << " Constant gain value. Default value: " << opt_constant_gain << std::endl + << std::endl + << " --help, -h" << std::endl + << " Print this helper message. " << std::endl + << std::endl; std::cout << "EXAMPLE" << std::endl - << " - How to get network IP" << std::endl + << " - How to get network IP" << std::endl #ifdef _WIN32 - << " $ " << argv[0] << " --portname COM1 --network" << std::endl - << " Try to connect FLIR PTU to port: COM1 with baudrate: 9600" << std::endl + << " $ " << argv[0] << " --portname COM1 --network" << std::endl + << " Try to connect FLIR PTU to port: COM1 with baudrate: 9600" << std::endl #else - << " $ " << argv[0] << " --portname /dev/ttyUSB0 --network" << std::endl - << " Try to connect FLIR PTU to port: /dev/ttyUSB0 with baudrate: 9600" << std::endl + << " $ " << argv[0] << " --portname /dev/ttyUSB0 --network" << std::endl + << " Try to connect FLIR PTU to port: /dev/ttyUSB0 with baudrate: 9600" << std::endl #endif - << " PTU HostName: PTU-5" << std::endl - << " PTU IP : 169.254.110.254" << std::endl - << " PTU Gateway : 0.0.0.0" << std::endl - << " - How to run this binary using network communication" << std::endl - << " $ " << argv[0] << " --portname tcp:169.254.110.254 --tag-size 0.1 --gain 0.1" << std::endl; + << " PTU HostName: PTU-5" << std::endl + << " PTU IP : 169.254.110.254" << std::endl + << " PTU Gateway : 0.0.0.0" << std::endl + << " - How to run this binary using network communication" << std::endl + << " $ " << argv[0] << " --portname tcp:169.254.110.254 --tag-size 0.1 --gain 0.1" << std::endl; return EXIT_SUCCESS; } @@ -285,7 +289,8 @@ int main(int argc, char **argv) } std::cout << "Stop the robot " << std::endl; robot.setRobotState(vpRobot::STATE_STOP); - } catch (const vpRobotException &e) { + } + catch (const vpRobotException &e) { std::cout << "Catch Flir Ptu exception: " << e.getMessage() << std::endl; robot.setRobotState(vpRobot::STATE_STOP); } diff --git a/tutorial/robot/mbot/raspberry/visp/mbot-apriltag-ibvs.cpp b/tutorial/robot/mbot/raspberry/visp/mbot-apriltag-ibvs.cpp index 9a5ce986db..44650e7db5 100644 --- a/tutorial/robot/mbot/raspberry/visp/mbot-apriltag-ibvs.cpp +++ b/tutorial/robot/mbot/raspberry/visp/mbot-apriltag-ibvs.cpp @@ -37,37 +37,48 @@ int main(int argc, const char **argv) for (int i = 1; i < argc; i++) { if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) { tagSize = std::atof(argv[i + 1]); - } else if (std::string(argv[i]) == "--input" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--input" && i + 1 < argc) { device = std::atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) { quad_decimate = (float)atof(argv[i + 1]); - } else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) { nThreads = 
std::atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) { intrinsic_file = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) { camera_name = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--display_tag") { + } + else if (std::string(argv[i]) == "--display_tag") { display_tag = true; #if defined(VISP_HAVE_X11) - } else if (std::string(argv[i]) == "--display_on") { + } + else if (std::string(argv[i]) == "--display_on") { display_on = true; - } else if (std::string(argv[i]) == "--save_image") { + } + else if (std::string(argv[i]) == "--save_image") { save_image = true; #endif - } else if (std::string(argv[i]) == "--serial_off") { + } + else if (std::string(argv[i]) == "--serial_off") { serial_off = true; - } else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) { tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)std::atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { + } + else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { std::cout << "Usage: " << argv[0] - << " [--input ] [--tag_size ]" - " [--quad_decimate ] [--nthreads ]" - " [--intrinsic ] [--camera_name ]" - " [--tag_family (0: TAG_36h11, 1: TAG_36h10, 2: " - "TAG_36ARTOOLKIT," - " 3: TAG_25h9, 4: TAG_25h7, 5: TAG_16h5)]" - " [--display_tag]"; + << " [--input ] [--tag_size ]" + " [--quad_decimate ] [--nthreads ]" + " [--intrinsic ] [--camera_name ]" + " [--tag_family (0: TAG_36h11, 1: TAG_36h10, 2: " + "TAG_36ARTOOLKIT," + " 3: TAG_25h9, 4: TAG_25h7, 5: TAG_16h5)]" + " [--display_tag]"; #if defined(VISP_HAVE_X11) std::cout << " [--display_on] [--save_image]"; #endif @@ -111,9 +122,13 @@ int main(int argc, const char **argv) vpCameraParameters cam; cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, I.getWidth() / 2., I.getHeight() / 2.); + +#if defined(VISP_HAVE_PUGIXML) vpXmlParserCamera parser; - if (!intrinsic_file.empty() && !camera_name.empty()) + if (!intrinsic_file.empty() && !camera_name.empty()) { parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion); + } +#endif std::cout << "cam:\n" << cam << std::endl; std::cout << "tagFamily: " << tagFamily << std::endl; @@ -160,8 +175,8 @@ int main(int argc, const char **argv) double Z_d = 0.4; // Define the desired polygon corresponding the the AprilTag CLOCKWISE - double X[4] = {tagSize / 2., tagSize / 2., -tagSize / 2., -tagSize / 2.}; - double Y[4] = {tagSize / 2., -tagSize / 2., -tagSize / 2., tagSize / 2.}; + double X[4] = { tagSize / 2., tagSize / 2., -tagSize / 2., -tagSize / 2. }; + double Y[4] = { tagSize / 2., -tagSize / 2., -tagSize / 2., tagSize / 2. }; std::vector vec_P, vec_P_d; for (int i = 0; i < 4; i++) { @@ -177,7 +192,7 @@ int main(int argc, const char **argv) vpMomentGravityCenter mg, mg_d; vpMomentCentered mc, mc_d; vpMomentAreaNormalized man(0, Z_d), - man_d(0, Z_d); // Declare normalized area. Desired area parameter will be updated below with m00 + man_d(0, Z_d); // Declare normalized area. 
Desired area parameter will be updated below with m00 vpMomentGravityCenterNormalized mgn, mgn_d; // Declare normalized gravity center // Desired moments @@ -313,8 +328,9 @@ int main(int argc, const char **argv) if (!serial_off) { serial->write(ss.str()); } - } else { - // stop the robot + } + else { + // stop the robot if (!serial_off) { serial->write("LED_RING=2,10,0,0\n"); // Switch on led 2 to red: tag not detected // serial->write("LED_RING=3,0,0,0\n"); // Switch on led 3 to blue: motor left not servoed @@ -340,15 +356,16 @@ int main(int argc, const char **argv) std::cout << "Benchmark computation time" << std::endl; std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms" - << " ; " << vpMath::getMedian(time_vec) << " ms" - << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl; + << " ; " << vpMath::getMedian(time_vec) << " ms" + << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl; if (display_on) delete d; if (!serial_off) { delete serial; } - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cerr << "Catch an exception: " << e.getMessage() << std::endl; if (!serial_off) { serial->write("LED_RING=1,10,0,0\n"); // Switch on led 1 to red diff --git a/tutorial/robot/mbot/raspberry/visp/mbot-apriltag-pbvs.cpp b/tutorial/robot/mbot/raspberry/visp/mbot-apriltag-pbvs.cpp index d9726cf305..c9d7d1c1ae 100644 --- a/tutorial/robot/mbot/raspberry/visp/mbot-apriltag-pbvs.cpp +++ b/tutorial/robot/mbot/raspberry/visp/mbot-apriltag-pbvs.cpp @@ -28,36 +28,47 @@ int main(int argc, const char **argv) for (int i = 1; i < argc; i++) { if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) { tagSize = std::atof(argv[i + 1]); - } else if (std::string(argv[i]) == "--input" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--input" && i + 1 < argc) { device = std::atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) { quad_decimate = (float)atof(argv[i + 1]); - } else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) { nThreads = std::atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) { intrinsic_file = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) { camera_name = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--display_tag") { + } + else if (std::string(argv[i]) == "--display_tag") { display_tag = true; #if defined(VISP_HAVE_X11) - } else if (std::string(argv[i]) == "--display_on") { + } + else if (std::string(argv[i]) == "--display_on") { display_on = true; - } else if (std::string(argv[i]) == "--save_image") { + } + else if (std::string(argv[i]) == "--save_image") { save_image = true; #endif - } else if (std::string(argv[i]) == "--serial_off") { + } + else if (std::string(argv[i]) == "--serial_off") { serial_off = true; - } else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) { tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]); - } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { + } + else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { std::cout 
<< "Usage: " << argv[0] - << " [--input ] [--tag_size ]" - " [--quad_decimate ] [--nthreads ]" - " [--intrinsic ] [--camera_name ]" - " [--tag_family (0: TAG_36h11, 1: TAG_36h10, 2: TAG_36ARTOOLKIT," - " 3: TAG_25h9, 4: TAG_25h7, 5: TAG_16h5)]" - " [--display_tag]"; + << " [--input ] [--tag_size ]" + " [--quad_decimate ] [--nthreads ]" + " [--intrinsic ] [--camera_name ]" + " [--tag_family (0: TAG_36h11, 1: TAG_36h10, 2: TAG_36ARTOOLKIT," + " 3: TAG_25h9, 4: TAG_25h7, 5: TAG_16h5)]" + " [--display_tag]"; #if defined(VISP_HAVE_X11) std::cout << " [--display_on] [--save_image]"; #endif @@ -101,9 +112,13 @@ int main(int argc, const char **argv) vpCameraParameters cam; cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, I.getWidth() / 2., I.getHeight() / 2.); + +#if defined(VISP_HAVE_PUGIXML) vpXmlParserCamera parser; - if (!intrinsic_file.empty() && !camera_name.empty()) + if (!intrinsic_file.empty() && !camera_name.empty()) { parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion); + } +#endif std::cout << "cam:\n" << cam << std::endl; std::cout << "tagFamily: " << tagFamily << std::endl; @@ -226,8 +241,9 @@ int main(int argc, const char **argv) if (!serial_off) { serial->write(ss.str()); } - } else { - // stop the robot + } + else { + // stop the robot if (!serial_off) { serial->write("LED_RING=2,10,0,0\n"); // Switch on led 2 to red: tag not detected // serial->write("LED_RING=3,0,0,0\n"); // Switch on led 3 to blue: motor left not servoed @@ -252,15 +268,16 @@ int main(int argc, const char **argv) std::cout << "Benchmark computation time" << std::endl; std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms" - << " ; " << vpMath::getMedian(time_vec) << " ms" - << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl; + << " ; " << vpMath::getMedian(time_vec) << " ms" + << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl; if (display_on) delete d; if (!serial_off) { delete serial; } - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cerr << "Catch an exception: " << e.getMessage() << std::endl; if (!serial_off) { serial->write("LED_RING=1,10,0,0\n"); // Switch on led 1 to red diff --git a/tutorial/tracking/model-based/generic-apriltag/tutorial-mb-generic-tracker-apriltag-rs2.cpp b/tutorial/tracking/model-based/generic-apriltag/tutorial-mb-generic-tracker-apriltag-rs2.cpp index 0a15fd9487..349fdd3a5e 100644 --- a/tutorial/tracking/model-based/generic-apriltag/tutorial-mb-generic-tracker-apriltag-rs2.cpp +++ b/tutorial/tracking/model-based/generic-apriltag/tutorial-mb-generic-tracker-apriltag-rs2.cpp @@ -3,7 +3,6 @@ #include #include -#include #include #include #include diff --git a/tutorial/tracking/model-based/generic-apriltag/tutorial-mb-generic-tracker-apriltag-webcam.cpp b/tutorial/tracking/model-based/generic-apriltag/tutorial-mb-generic-tracker-apriltag-webcam.cpp index 4f25ea7001..c49f2bfb03 100644 --- a/tutorial/tracking/model-based/generic-apriltag/tutorial-mb-generic-tracker-apriltag-webcam.cpp +++ b/tutorial/tracking/model-based/generic-apriltag/tutorial-mb-generic-tracker-apriltag-webcam.cpp @@ -193,11 +193,13 @@ int main(int argc, const char **argv) vpCameraParameters cam; bool camIsInit = false; +#if defined(VISP_HAVE_PUGIXML) vpXmlParserCamera parser; if (!opt_intrinsic_file.empty() && !opt_camera_name.empty()) { parser.parse(cam, opt_intrinsic_file, opt_camera_name, vpCameraParameters::perspectiveProjWithoutDistortion); camIsInit = true; } +#endif try { vpImage I; 
diff --git a/tutorial/tracking/model-based/generic-rgbd-blender/tutorial-mb-generic-tracker-rgbd-blender.cpp b/tutorial/tracking/model-based/generic-rgbd-blender/tutorial-mb-generic-tracker-rgbd-blender.cpp index 20f6dd453c..3cdde9ab07 100644 --- a/tutorial/tracking/model-based/generic-rgbd-blender/tutorial-mb-generic-tracker-rgbd-blender.cpp +++ b/tutorial/tracking/model-based/generic-rgbd-blender/tutorial-mb-generic-tracker-rgbd-blender.cpp @@ -10,7 +10,7 @@ #include #include -#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGCODECS) +#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGCODECS) && defined(VISP_HAVE_PUGIXML) namespace { bool read_data(unsigned int cpt, const std::string &video_color_images, const std::string &video_depth_images, @@ -225,6 +225,7 @@ int main(int argc, const char **argv) != vpXmlParserCamera::SEQUENCE_OK) { std::cout << "Cannot found intrinsics for camera " << depth_camera_name << std::endl; } + if (!opt_disable_depth) tracker.setCameraParameters(cam_color, cam_depth); else diff --git a/tutorial/tracking/model-based/generic-rgbd/CMakeLists.txt b/tutorial/tracking/model-based/generic-rgbd/CMakeLists.txt index 89030ec6d4..b3b12cb239 100644 --- a/tutorial/tracking/model-based/generic-rgbd/CMakeLists.txt +++ b/tutorial/tracking/model-based/generic-rgbd/CMakeLists.txt @@ -60,4 +60,5 @@ foreach(cpp ${tutorial_cpp}) endforeach() # Copy the data files -visp_copy_dir(tutorial-mb-generic-tracker-rgbd.cpp "${CMAKE_CURRENT_SOURCE_DIR}" data model) +visp_copy_dir(tutorial-mb-generic-tracker-rgbd.cpp "${CMAKE_CURRENT_SOURCE_DIR}" data) +visp_copy_dir(tutorial-mb-generic-tracker-rgbd.cpp "${CMAKE_CURRENT_SOURCE_DIR}" model) diff --git a/tutorial/tracking/model-based/generic-rgbd/tutorial-mb-generic-tracker-rgbd-realsense.cpp b/tutorial/tracking/model-based/generic-rgbd/tutorial-mb-generic-tracker-rgbd-realsense.cpp index d9997ada0c..f5e2a33140 100644 --- a/tutorial/tracking/model-based/generic-rgbd/tutorial-mb-generic-tracker-rgbd-realsense.cpp +++ b/tutorial/tracking/model-based/generic-rgbd/tutorial-mb-generic-tracker-rgbd-realsense.cpp @@ -3,10 +3,9 @@ #include -#if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_OPENCV) +#if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_OPENCV) && defined(VISP_HAVE_PUGIXML) #include #include -#include #include #include #include @@ -33,56 +32,71 @@ int main(int argc, char *argv[]) for (int i = 1; i < argc; i++) { if (std::string(argv[i]) == "--config_color" && i + 1 < argc) { config_color = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--config_depth" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--config_depth" && i + 1 < argc) { config_depth = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--model_color" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--model_color" && i + 1 < argc) { model_color = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--model_depth" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--model_depth" && i + 1 < argc) { model_depth = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--init_file" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--init_file" && i + 1 < argc) { init_file = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--proj_error_threshold" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--proj_error_threshold" && i + 1 < argc) { proj_error_threshold = std::atof(argv[i + 1]); - } else if (std::string(argv[i]) == "--use_ogre") { + } + else if 
(std::string(argv[i]) == "--use_ogre") { use_ogre = true; - } else if (std::string(argv[i]) == "--use_scanline") { + } + else if (std::string(argv[i]) == "--use_scanline") { use_scanline = true; - } else if (std::string(argv[i]) == "--use_edges" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--use_edges" && i + 1 < argc) { use_edges = (std::atoi(argv[i + 1]) == 0 ? false : true); - } else if (std::string(argv[i]) == "--use_klt" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--use_klt" && i + 1 < argc) { use_klt = (std::atoi(argv[i + 1]) == 0 ? false : true); - } else if (std::string(argv[i]) == "--use_depth" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--use_depth" && i + 1 < argc) { use_depth = (std::atoi(argv[i + 1]) == 0 ? false : true); - } else if (std::string(argv[i]) == "--learn") { + } + else if (std::string(argv[i]) == "--learn") { learn = true; - } else if (std::string(argv[i]) == "--learning_data" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--learning_data" && i + 1 < argc) { learning_data = argv[i + 1]; - } else if (std::string(argv[i]) == "--auto_init") { + } + else if (std::string(argv[i]) == "--auto_init") { auto_init = true; - } else if (std::string(argv[i]) == "--display_proj_error") { + } + else if (std::string(argv[i]) == "--display_proj_error") { display_projection_error = true; - } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { + } + else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { std::cout << "Usage: \n" - << argv[0] - << " [--model_color ] [--model_depth ]" - " [--config_color ] [--config_depth ]" - " [--init_file ] [--use_ogre] [--use_scanline]" - " [--proj_error_threshold (default: " - << proj_error_threshold - << ")]" - " [--use_edges <0|1> (default: 1)] [--use_klt <0|1> (default: 1)] [--use_depth <0|1> (default: 1)]" - " [--learn] [--auto_init] [--learning_data (default: learning/data-learned.bin)]" - " [--display_proj_error]" - << std::endl; + << argv[0] + << " [--model_color ] [--model_depth ]" + " [--config_color ] [--config_depth ]" + " [--init_file ] [--use_ogre] [--use_scanline]" + " [--proj_error_threshold (default: " + << proj_error_threshold + << ")]" + " [--use_edges <0|1> (default: 1)] [--use_klt <0|1> (default: 1)] [--use_depth <0|1> (default: 1)]" + " [--learn] [--auto_init] [--learning_data (default: learning/data-learned.bin)]" + " [--display_proj_error]" + << std::endl; std::cout << "\n** How to track a 4.2 cm width cube with manual initialization:\n" - << argv[0] << " --model_color model/cube/cube.cao --use_edges 1 --use_klt 1 --use_depth 1" << std::endl; + << argv[0] << " --model_color model/cube/cube.cao --use_edges 1 --use_klt 1 --use_depth 1" << std::endl; std::cout << "\n** How to learn the cube and create a learning database:\n" - << argv[0] << " --model_color model/cube/cube.cao --use_edges 1 --use_klt 1 --use_depth 1 --learn" - << std::endl; + << argv[0] << " --model_color model/cube/cube.cao --use_edges 1 --use_klt 1 --use_depth 1 --learn" + << std::endl; std::cout << "\n** How to track the cube with initialization from learning database:\n" - << argv[0] << " --model_color model/cube/cube.cao --use_edges 1 --use_klt 1 --use_depth 1 --auto_init" - << std::endl; + << argv[0] << " --model_color model/cube/cube.cao --use_edges 1 --use_klt 1 --use_depth 1 --auto_init" + << std::endl; return EXIT_SUCCESS; } @@ -112,15 +126,15 @@ int main(int argc, char *argv[]) std::cout << " Display proj. 
error: " << display_projection_error << std::endl; std::cout << "Config files: " << std::endl; std::cout << " Config color: " - << "\"" << config_color << "\"" << std::endl; + << "\"" << config_color << "\"" << std::endl; std::cout << " Config depth: " - << "\"" << config_depth << "\"" << std::endl; + << "\"" << config_depth << "\"" << std::endl; std::cout << " Model color : " - << "\"" << model_color << "\"" << std::endl; + << "\"" << model_color << "\"" << std::endl; std::cout << " Model depth : " - << "\"" << model_depth << "\"" << std::endl; + << "\"" << model_depth << "\"" << std::endl; std::cout << " Init file : " - << "\"" << init_file << "\"" << std::endl; + << "\"" << init_file << "\"" << std::endl; std::cout << "Learning options : " << std::endl; std::cout << " Learn : " << learn << std::endl; std::cout << " Auto init : " << auto_init << std::endl; @@ -133,8 +147,8 @@ int main(int argc, char *argv[]) if (config_color.empty() || config_depth.empty() || model_color.empty() || model_depth.empty() || init_file.empty()) { std::cout << "config_color.empty() || config_depth.empty() || model_color.empty() || model_depth.empty() || " - "init_file.empty()" - << std::endl; + "init_file.empty()" + << std::endl; return EXIT_FAILURE; } @@ -147,16 +161,17 @@ int main(int argc, char *argv[]) try { realsense.open(config); - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cout << "Catch an exception: " << e.what() << std::endl; std::cout << "Check if the Realsense camera is connected..." << std::endl; return EXIT_SUCCESS; } vpCameraParameters cam_color = - realsense.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion); + realsense.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion); vpCameraParameters cam_depth = - realsense.getCameraParameters(RS2_STREAM_DEPTH, vpCameraParameters::perspectiveProjWithoutDistortion); + realsense.getCameraParameters(RS2_STREAM_DEPTH, vpCameraParameters::perspectiveProjWithoutDistortion); std::cout << "Sensor internal camera parameters for color camera: " << cam_color << std::endl; std::cout << "Sensor internal camera parameters for depth camera: " << cam_depth << std::endl; @@ -239,11 +254,13 @@ int main(int argc, char *argv[]) mapOfImages["Camera2"] = &I_depth; mapOfInitFiles["Camera1"] = init_file; tracker.setCameraParameters(cam_color, cam_depth); - } else if (use_edges || use_klt) { + } + else if (use_edges || use_klt) { tracker.loadConfigFile(config_color); tracker.loadModel(model_color); tracker.setCameraParameters(cam_color); - } else if (use_depth) { + } + else if (use_depth) { tracker.loadConfigFile(config_depth); tracker.loadModel(model_depth); tracker.setCameraParameters(cam_depth); @@ -288,7 +305,8 @@ int main(int argc, char *argv[]) return EXIT_FAILURE; } keypoint.loadLearningData(learning_data, true); - } else { + } + else { if ((use_edges || use_klt) && use_depth) tracker.initClick(mapOfImages, mapOfInitFiles, true); else if (use_edges || use_klt) @@ -335,9 +353,11 @@ int main(int argc, char *argv[]) mapOfPointclouds["Camera2"] = &pointcloud; mapOfWidths["Camera2"] = width; mapOfHeights["Camera2"] = height; - } else if (use_edges || use_klt) { + } + else if (use_edges || use_klt) { mapOfImages["Camera"] = &I_gray; - } else if (use_depth) { + } + else if (use_depth) { mapOfPointclouds["Camera"] = &pointcloud; mapOfWidths["Camera"] = width; mapOfHeights["Camera"] = height; @@ -351,12 +371,15 @@ int main(int argc, char *argv[]) 
mapOfCameraPoses["Camera1"] = cMo; mapOfCameraPoses["Camera2"] = depth_M_color * cMo; tracker.initFromPose(mapOfImages, mapOfCameraPoses); - } else if (use_edges || use_klt) { + } + else if (use_edges || use_klt) { tracker.initFromPose(I_gray, cMo); - } else if (use_depth) { + } + else if (use_depth) { tracker.initFromPose(I_depth, depth_M_color * cMo); } - } else { + } + else { if (use_edges || use_klt) { vpDisplay::flush(I_gray); } @@ -377,12 +400,15 @@ int main(int argc, char *argv[]) } if ((use_edges || use_klt) && use_depth) { tracker.track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights); - } else if (use_edges || use_klt) { + } + else if (use_edges || use_klt) { tracker.track(I_gray); - } else if (use_depth) { + } + else if (use_depth) { tracker.track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights); } - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cout << "Tracker exception: " << e.getStringMessage() << std::endl; tracking_failed = true; if (auto_init) { @@ -399,7 +425,8 @@ int main(int argc, char *argv[]) if (tracker.getTrackerType() & vpMbGenericTracker::EDGE_TRACKER) { // Check tracking errors proj_error = tracker.getProjectionError(); - } else { + } + else { proj_error = tracker.computeCurrentProjectionError(I_gray, cMo, cam_color); } @@ -418,10 +445,12 @@ int main(int argc, char *argv[]) tracker.display(I_gray, I_depth, cMo, depth_M_color * cMo, cam_color, cam_depth, vpColor::red, 3); vpDisplay::displayFrame(I_gray, cMo, cam_color, 0.05, vpColor::none, 3); vpDisplay::displayFrame(I_depth, depth_M_color * cMo, cam_depth, 0.05, vpColor::none, 3); - } else if (use_edges || use_klt) { + } + else if (use_edges || use_klt) { tracker.display(I_gray, cMo, cam_color, vpColor::red, 3); vpDisplay::displayFrame(I_gray, cMo, cam_color, 0.05, vpColor::none, 3); - } else if (use_depth) { + } + else if (use_depth) { tracker.display(I_depth, cMo, cam_depth, vpColor::red, 3); vpDisplay::displayFrame(I_depth, cMo, cam_depth, 0.05, vpColor::none, 3); } @@ -434,7 +463,7 @@ int main(int argc, char *argv[]) { std::stringstream ss; ss << "Features: edges " << tracker.getNbFeaturesEdge() << ", klt " << tracker.getNbFeaturesKlt() - << ", depth " << tracker.getNbFeaturesDepthDense(); + << ", depth " << tracker.getNbFeaturesDepthDense(); vpDisplay::displayText(I_gray, I_gray.getHeight() - 30, 20, ss.str(), vpColor::red); } } @@ -457,9 +486,11 @@ int main(int argc, char *argv[]) if (vpDisplay::getClick(I_gray, button, false)) { if (button == vpMouseButton::button3) { quit = true; - } else if (button == vpMouseButton::button1 && learn) { + } + else if (button == vpMouseButton::button1 && learn) { learn_position = true; - } else if (button == vpMouseButton::button1 && auto_init && !learn) { + } + else if (button == vpMouseButton::button1 && auto_init && !learn) { run_auto_init = true; } } @@ -507,14 +538,15 @@ int main(int argc, char *argv[]) std::cout << "Save learning file: " << learning_data << std::endl; keypoint.saveLearningData(learning_data, true, true); } - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cout << "Catch an exception: " << e.what() << std::endl; } if (!times_vec.empty()) { std::cout << "\nProcessing time, Mean: " << vpMath::getMean(times_vec) - << " ms ; Median: " << vpMath::getMedian(times_vec) << " ; Std: " << vpMath::getStdev(times_vec) << " ms" - << std::endl; + << " ms ; Median: " << vpMath::getMedian(times_vec) << " ; Std: " << vpMath::getStdev(times_vec) << " ms" + << std::endl; } return EXIT_SUCCESS; 
diff --git a/tutorial/tracking/model-based/generic-rgbd/tutorial-mb-generic-tracker-rgbd-structure-core.cpp b/tutorial/tracking/model-based/generic-rgbd/tutorial-mb-generic-tracker-rgbd-structure-core.cpp index 623f510f33..408fd79777 100644 --- a/tutorial/tracking/model-based/generic-rgbd/tutorial-mb-generic-tracker-rgbd-structure-core.cpp +++ b/tutorial/tracking/model-based/generic-rgbd/tutorial-mb-generic-tracker-rgbd-structure-core.cpp @@ -3,7 +3,7 @@ #include -#if defined(VISP_HAVE_OCCIPITAL_STRUCTURE) && defined(VISP_HAVE_OPENCV) +#if defined(VISP_HAVE_OCCIPITAL_STRUCTURE) && defined(VISP_HAVE_OPENCV) && defined(VISP_HAVE_PUGIXML) #include #include #include @@ -33,56 +33,71 @@ int main(int argc, char *argv[]) for (int i = 1; i < argc; i++) { if (std::string(argv[i]) == "--config_color" && i + 1 < argc) { config_color = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--config_depth" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--config_depth" && i + 1 < argc) { config_depth = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--model_color" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--model_color" && i + 1 < argc) { model_color = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--model_depth" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--model_depth" && i + 1 < argc) { model_depth = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--init_file" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--init_file" && i + 1 < argc) { init_file = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--proj_error_threshold" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--proj_error_threshold" && i + 1 < argc) { proj_error_threshold = std::atof(argv[i + 1]); - } else if (std::string(argv[i]) == "--use_ogre") { + } + else if (std::string(argv[i]) == "--use_ogre") { use_ogre = true; - } else if (std::string(argv[i]) == "--use_scanline") { + } + else if (std::string(argv[i]) == "--use_scanline") { use_scanline = true; - } else if (std::string(argv[i]) == "--use_edges" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--use_edges" && i + 1 < argc) { use_edges = (std::atoi(argv[i + 1]) == 0 ? false : true); - } else if (std::string(argv[i]) == "--use_klt" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--use_klt" && i + 1 < argc) { use_klt = (std::atoi(argv[i + 1]) == 0 ? false : true); - } else if (std::string(argv[i]) == "--use_depth" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--use_depth" && i + 1 < argc) { use_depth = (std::atoi(argv[i + 1]) == 0 ? 
false : true); - } else if (std::string(argv[i]) == "--learn") { + } + else if (std::string(argv[i]) == "--learn") { learn = true; - } else if (std::string(argv[i]) == "--learning_data" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--learning_data" && i + 1 < argc) { learning_data = argv[i + 1]; - } else if (std::string(argv[i]) == "--auto_init") { + } + else if (std::string(argv[i]) == "--auto_init") { auto_init = true; - } else if (std::string(argv[i]) == "--display_proj_error") { + } + else if (std::string(argv[i]) == "--display_proj_error") { display_projection_error = true; - } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { + } + else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { std::cout << "Usage: \n" - << argv[0] - << " [--model_color ] [--model_depth ]" - " [--config_color ] [--config_depth ]" - " [--init_file ] [--use_ogre] [--use_scanline]" - " [--proj_error_threshold (default: " - << proj_error_threshold - << ")]" - " [--use_edges <0|1> (default: 1)] [--use_klt <0|1> (default: 1)] [--use_depth <0|1> (default: 1)]" - " [--learn] [--auto_init] [--learning_data (default: learning/data-learned.bin)]" - " [--display_proj_error]" - << std::endl; + << argv[0] + << " [--model_color ] [--model_depth ]" + " [--config_color ] [--config_depth ]" + " [--init_file ] [--use_ogre] [--use_scanline]" + " [--proj_error_threshold (default: " + << proj_error_threshold + << ")]" + " [--use_edges <0|1> (default: 1)] [--use_klt <0|1> (default: 1)] [--use_depth <0|1> (default: 1)]" + " [--learn] [--auto_init] [--learning_data (default: learning/data-learned.bin)]" + " [--display_proj_error]" + << std::endl; std::cout << "\n** How to track a 4.2 cm width cube with manual initialization:\n" - << argv[0] << " --model_color model/cube/cube.cao --use_edges 1 --use_klt 1 --use_depth 1" << std::endl; + << argv[0] << " --model_color model/cube/cube.cao --use_edges 1 --use_klt 1 --use_depth 1" << std::endl; std::cout << "\n** How to learn the cube and create a learning database:\n" - << argv[0] << " --model_color model/cube/cube.cao --use_edges 1 --use_klt 1 --use_depth 1 --learn" - << std::endl; + << argv[0] << " --model_color model/cube/cube.cao --use_edges 1 --use_klt 1 --use_depth 1 --learn" + << std::endl; std::cout << "\n** How to track the cube with initialization from learning database:\n" - << argv[0] << " --model_color model/cube/cube.cao --use_edges 1 --use_klt 1 --use_depth 1 --auto_init" - << std::endl; + << argv[0] << " --model_color model/cube/cube.cao --use_edges 1 --use_klt 1 --use_depth 1 --auto_init" + << std::endl; return EXIT_SUCCESS; } @@ -112,15 +127,15 @@ int main(int argc, char *argv[]) std::cout << " Display proj. 
error: " << display_projection_error << std::endl; std::cout << "Config files: " << std::endl; std::cout << " Config color: " - << "\"" << config_color << "\"" << std::endl; + << "\"" << config_color << "\"" << std::endl; std::cout << " Config depth: " - << "\"" << config_depth << "\"" << std::endl; + << "\"" << config_depth << "\"" << std::endl; std::cout << " Model color : " - << "\"" << model_color << "\"" << std::endl; + << "\"" << model_color << "\"" << std::endl; std::cout << " Model depth : " - << "\"" << model_depth << "\"" << std::endl; + << "\"" << model_depth << "\"" << std::endl; std::cout << " Init file : " - << "\"" << init_file << "\"" << std::endl; + << "\"" << init_file << "\"" << std::endl; std::cout << "Learning options : " << std::endl; std::cout << " Learn : " << learn << std::endl; std::cout << " Auto init : " << auto_init << std::endl; @@ -133,8 +148,8 @@ int main(int argc, char *argv[]) if (config_color.empty() || config_depth.empty() || model_color.empty() || model_depth.empty() || init_file.empty()) { std::cout << "config_color.empty() || config_depth.empty() || model_color.empty() || model_depth.empty() || " - "init_file.empty()" - << std::endl; + "init_file.empty()" + << std::endl; return EXIT_FAILURE; } @@ -146,7 +161,8 @@ int main(int argc, char *argv[]) try { sc.open(settings); - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cout << "Catch an exception: " << e.what() << std::endl; std::cout << "Check if the Structure Core camera is connected..." << std::endl; return EXIT_SUCCESS; @@ -236,11 +252,13 @@ int main(int argc, char *argv[]) mapOfImages["Camera2"] = &I_depth; mapOfInitFiles["Camera1"] = init_file; tracker.setCameraParameters(cam_color, cam_depth); - } else if (use_edges || use_klt) { + } + else if (use_edges || use_klt) { tracker.loadConfigFile(config_color); tracker.loadModel(model_color); tracker.setCameraParameters(cam_color); - } else if (use_depth) { + } + else if (use_depth) { tracker.loadConfigFile(config_depth); tracker.loadModel(model_depth); tracker.setCameraParameters(cam_depth); @@ -285,7 +303,8 @@ int main(int argc, char *argv[]) return EXIT_FAILURE; } keypoint.loadLearningData(learning_data, true); - } else { + } + else { if ((use_edges || use_klt) && use_depth) tracker.initClick(mapOfImages, mapOfInitFiles, true); else if (use_edges || use_klt) @@ -332,9 +351,11 @@ int main(int argc, char *argv[]) mapOfPointclouds["Camera2"] = &pointcloud; mapOfWidths["Camera2"] = width; mapOfHeights["Camera2"] = height; - } else if (use_edges || use_klt) { + } + else if (use_edges || use_klt) { mapOfImages["Camera"] = &I_gray; - } else if (use_depth) { + } + else if (use_depth) { mapOfPointclouds["Camera"] = &pointcloud; mapOfWidths["Camera"] = width; mapOfHeights["Camera"] = height; @@ -348,12 +369,15 @@ int main(int argc, char *argv[]) mapOfCameraPoses["Camera1"] = cMo; mapOfCameraPoses["Camera2"] = depth_M_color * cMo; tracker.initFromPose(mapOfImages, mapOfCameraPoses); - } else if (use_edges || use_klt) { + } + else if (use_edges || use_klt) { tracker.initFromPose(I_gray, cMo); - } else if (use_depth) { + } + else if (use_depth) { tracker.initFromPose(I_depth, depth_M_color * cMo); } - } else { + } + else { if (use_edges || use_klt) { vpDisplay::flush(I_gray); } @@ -374,12 +398,15 @@ int main(int argc, char *argv[]) } if ((use_edges || use_klt) && use_depth) { tracker.track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights); - } else if (use_edges || use_klt) { + } + else if (use_edges || use_klt) { 
tracker.track(I_gray); - } else if (use_depth) { + } + else if (use_depth) { tracker.track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights); } - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cout << "Tracker exception: " << e.getStringMessage() << std::endl; tracking_failed = true; if (auto_init) { @@ -396,7 +423,8 @@ int main(int argc, char *argv[]) if (tracker.getTrackerType() & vpMbGenericTracker::EDGE_TRACKER) { // Check tracking errors proj_error = tracker.getProjectionError(); - } else { + } + else { proj_error = tracker.computeCurrentProjectionError(I_gray, cMo, cam_color); } @@ -415,10 +443,12 @@ int main(int argc, char *argv[]) tracker.display(I_gray, I_depth, cMo, depth_M_color * cMo, cam_color, cam_depth, vpColor::red, 3); vpDisplay::displayFrame(I_gray, cMo, cam_color, 0.05, vpColor::none, 3); vpDisplay::displayFrame(I_depth, depth_M_color * cMo, cam_depth, 0.05, vpColor::none, 3); - } else if (use_edges || use_klt) { + } + else if (use_edges || use_klt) { tracker.display(I_gray, cMo, cam_color, vpColor::red, 3); vpDisplay::displayFrame(I_gray, cMo, cam_color, 0.05, vpColor::none, 3); - } else if (use_depth) { + } + else if (use_depth) { tracker.display(I_depth, cMo, cam_depth, vpColor::red, 3); vpDisplay::displayFrame(I_depth, cMo, cam_depth, 0.05, vpColor::none, 3); } @@ -431,7 +461,7 @@ int main(int argc, char *argv[]) { std::stringstream ss; ss << "Features: edges " << tracker.getNbFeaturesEdge() << ", klt " << tracker.getNbFeaturesKlt() - << ", depth " << tracker.getNbFeaturesDepthDense(); + << ", depth " << tracker.getNbFeaturesDepthDense(); vpDisplay::displayText(I_gray, I_gray.getHeight() - 30, 20, ss.str(), vpColor::red); } } @@ -454,9 +484,11 @@ int main(int argc, char *argv[]) if (vpDisplay::getClick(I_gray, button, false)) { if (button == vpMouseButton::button3) { quit = true; - } else if (button == vpMouseButton::button1 && learn) { + } + else if (button == vpMouseButton::button1 && learn) { learn_position = true; - } else if (button == vpMouseButton::button1 && auto_init && !learn) { + } + else if (button == vpMouseButton::button1 && auto_init && !learn) { run_auto_init = true; } } @@ -504,14 +536,15 @@ int main(int argc, char *argv[]) std::cout << "Save learning file: " << learning_data << std::endl; keypoint.saveLearningData(learning_data, true, true); } - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cout << "Catch an exception: " << e.what() << std::endl; } if (!times_vec.empty()) { std::cout << "\nProcessing time, Mean: " << vpMath::getMean(times_vec) - << " ms ; Median: " << vpMath::getMedian(times_vec) << " ; Std: " << vpMath::getStdev(times_vec) << " ms" - << std::endl; + << " ms ; Median: " << vpMath::getMedian(times_vec) << " ; Std: " << vpMath::getStdev(times_vec) << " ms" + << std::endl; } return EXIT_SUCCESS; diff --git a/tutorial/tracking/model-based/generic-rgbd/tutorial-mb-generic-tracker-rgbd.cpp b/tutorial/tracking/model-based/generic-rgbd/tutorial-mb-generic-tracker-rgbd.cpp index 017e2b1cc3..8013831c88 100644 --- a/tutorial/tracking/model-based/generic-rgbd/tutorial-mb-generic-tracker-rgbd.cpp +++ b/tutorial/tracking/model-based/generic-rgbd/tutorial-mb-generic-tracker-rgbd.cpp @@ -3,7 +3,6 @@ #include #include -#include #include #include #include @@ -17,7 +16,8 @@ namespace { -struct vpRealsenseIntrinsics_t { +struct vpRealsenseIntrinsics_t +{ float ppx; /**< Horizontal coordinate of the principal point of the image, as a pixel offset from the left edge */ float ppy; /**< 
Vertical coordinate of the principal point of the image, as @@ -111,7 +111,7 @@ bool read_data(unsigned int cpt, const std::string &input_directory, vpImagepoints[(size_t)(i * width + j)].x = point[0]; pointcloud->points[(size_t)(i * width + j)].y = point[1]; @@ -135,30 +135,37 @@ int main(int argc, char *argv[]) for (int i = 1; i < argc; i++) { if (std::string(argv[i]) == "--input_directory" && i + 1 < argc) { input_directory = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--config_color" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--config_color" && i + 1 < argc) { config_color = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--config_depth" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--config_depth" && i + 1 < argc) { config_depth = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--model_color" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--model_color" && i + 1 < argc) { model_color = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--model_depth" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--model_depth" && i + 1 < argc) { model_depth = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--init_file" && i + 1 < argc) { + } + else if (std::string(argv[i]) == "--init_file" && i + 1 < argc) { init_file = std::string(argv[i + 1]); - } else if (std::string(argv[i]) == "--disable_depth") { + } + else if (std::string(argv[i]) == "--disable_depth") { disable_depth = true; - } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { + } + else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { std::cout << "Usage: \n" - << argv[0] - << " --input_directory --config_color --config_depth " - " --model_color --model_depth --init_file --disable_depth" - << std::endl; + << argv[0] + << " --input_directory --config_color --config_depth " + " --model_color --model_depth --init_file --disable_depth" + << std::endl; std::cout - << "\nExample:\n" - << argv[0] - << " --config_color model/cube/cube.xml --config_depth model/cube/cube.xml" - " --model_color model/cube/cube.cao --model_depth model/cube/cube.cao --init_file model/cube/cube.init\n" - << std::endl; + << "\nExample:\n" + << argv[0] + << " --config_color model/cube/cube.xml --config_depth model/cube/cube.xml" + " --model_color model/cube/cube.cao --model_depth model/cube/cube.cao --init_file model/cube/cube.init\n" + << std::endl; return EXIT_SUCCESS; } } @@ -179,17 +186,17 @@ int main(int argc, char *argv[]) #endif std::cout << "Config files: " << std::endl; std::cout << " Input directory: " - << "\"" << input_directory << "\"" << std::endl; + << "\"" << input_directory << "\"" << std::endl; std::cout << " Config color: " - << "\"" << config_color << "\"" << std::endl; + << "\"" << config_color << "\"" << std::endl; std::cout << " Config depth: " - << "\"" << config_depth << "\"" << std::endl; + << "\"" << config_depth << "\"" << std::endl; std::cout << " Model color : " - << "\"" << model_color << "\"" << std::endl; + << "\"" << model_color << "\"" << std::endl; std::cout << " Model depth : " - << "\"" << model_depth << "\"" << std::endl; + << "\"" << model_depth << "\"" << std::endl; std::cout << " Init file : " - << "\"" << init_file << "\"" << std::endl; + << "\"" << init_file << "\"" << std::endl; vpImage I_color; //! 
[Images] @@ -232,9 +239,60 @@ int main(int argc, char *argv[]) trackerTypes.push_back(vpMbGenericTracker::DEPTH_DENSE_TRACKER); vpMbGenericTracker tracker(trackerTypes); //! [Constructor] +#if defined(VISP_HAVE_PUGIXML) //! [Load config file] tracker.loadConfigFile(config_color, config_depth); //! [Load config file] +#else + { + vpCameraParameters cam_color, cam_depth; + cam_color.initPersProjWithoutDistortion(614.9, 614.9, 320.2, 241.5); + cam_depth.initPersProjWithoutDistortion(384.0, 384.0, 320.5, 235.6); + tracker.setCameraParameters(cam_color, cam_depth); + } + + // Edge + vpMe me; + me.setMaskSize(5); + me.setMaskNumber(180); + me.setRange(7); + me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD); + me.setThreshold(10); + me.setMu1(0.5); + me.setMu2(0.5); + me.setSampleStep(4); + tracker.setMovingEdge(me); + + // Klt +#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO) + vpKltOpencv klt; + tracker.setKltMaskBorder(5); + klt.setMaxFeatures(300); + klt.setWindowSize(5); + klt.setQuality(0.01); + klt.setMinDistance(5); + klt.setHarrisFreeParameter(0.01); + klt.setBlockSize(3); + klt.setPyramidLevels(3); + + tracker.setKltOpencv(klt); +#endif + + // Depth + tracker.setDepthNormalFeatureEstimationMethod(vpMbtFaceDepthNormal::ROBUST_FEATURE_ESTIMATION); + tracker.setDepthNormalPclPlaneEstimationMethod(2); + tracker.setDepthNormalPclPlaneEstimationRansacMaxIter(200); + tracker.setDepthNormalPclPlaneEstimationRansacThreshold(0.001); + tracker.setDepthNormalSamplingStep(2, 2); + + tracker.setDepthDenseSamplingStep(4, 4); + + tracker.setAngleAppear(vpMath::rad(80.0)); + tracker.setAngleDisappear(vpMath::rad(85.0)); + tracker.setNearClippingDistance(0.001); + tracker.setFarClippingDistance(5.0); + tracker.setClipping(tracker.getClipping() | vpMbtPolygon::FOV_CLIPPING); +#endif //! [Load cao] tracker.loadModel(model_color, model_depth); //! [Load cao] @@ -326,7 +384,7 @@ int main(int argc, char *argv[]) { std::stringstream ss; ss << "Features: edges " << tracker.getNbFeaturesEdge() << ", klt " << tracker.getNbFeaturesKlt() << ", depth " - << tracker.getNbFeaturesDepthDense(); + << tracker.getNbFeaturesDepthDense(); vpDisplay::displayText(I_gray, I_gray.getHeight() - 30, 20, ss.str(), vpColor::red); } @@ -340,13 +398,14 @@ int main(int argc, char *argv[]) frame_cpt++; } - } catch (const vpException &e) { + } + catch (const vpException &e) { std::cout << "Catch exception: " << e.getStringMessage() << std::endl; } std::cout << "\nProcessing time, Mean: " << vpMath::getMean(times_vec) - << " ms ; Median: " << vpMath::getMedian(times_vec) << " ; Std: " << vpMath::getStdev(times_vec) << " ms" - << std::endl; + << " ms ; Median: " << vpMath::getMedian(times_vec) << " ; Std: " << vpMath::getStdev(times_vec) << " ms" + << std::endl; vpDisplay::displayText(I_gray, 60, 20, "Click to quit", vpColor::red); vpDisplay::flush(I_gray); @@ -358,8 +417,8 @@ int main(int argc, char *argv[]) int main() { std::cout << "To run this tutorial, ViSP should be build with PCL library." - " Install libpcl, configure and build again ViSP..." - << std::endl; + " Install libpcl, configure and build again ViSP..." 
+ << std::endl; return EXIT_SUCCESS; } #endif diff --git a/tutorial/tracking/model-based/generic-stereo/tutorial-mb-generic-tracker-stereo-mono.cpp b/tutorial/tracking/model-based/generic-stereo/tutorial-mb-generic-tracker-stereo-mono.cpp index 2d221f28a7..d891b94eb6 100644 --- a/tutorial/tracking/model-based/generic-stereo/tutorial-mb-generic-tracker-stereo-mono.cpp +++ b/tutorial/tracking/model-based/generic-stereo/tutorial-mb-generic-tracker-stereo-mono.cpp @@ -79,10 +79,12 @@ int main(int argc, char **argv) #endif //! [Set parameters] + +#if defined(VISP_HAVE_PUGIXML) //! [Load config file] tracker.loadConfigFile(objectname + ".xml"); //! [Load config file] -#if 0 +#else // Corresponding parameters manually set to have an example code if (opt_tracker == 1 || opt_tracker == 3) { vpMe me; diff --git a/tutorial/tracking/model-based/generic-stereo/tutorial-mb-generic-tracker-stereo.cpp b/tutorial/tracking/model-based/generic-stereo/tutorial-mb-generic-tracker-stereo.cpp index 22d729c08d..8ed5077a6a 100644 --- a/tutorial/tracking/model-based/generic-stereo/tutorial-mb-generic-tracker-stereo.cpp +++ b/tutorial/tracking/model-based/generic-stereo/tutorial-mb-generic-tracker-stereo.cpp @@ -13,7 +13,7 @@ int main(int argc, char **argv) { -#if defined(VISP_HAVE_OPENCV) +#if defined(VISP_HAVE_OPENCV) && defined(VISP_HAVE_PUGIXML) try { std::string opt_videoname_left = "teabox_left.mp4"; std::string opt_videoname_right = "teabox_right.mp4"; @@ -24,23 +24,25 @@ int main(int argc, char **argv) if (std::string(argv[i]) == "--name" && i + 2 < argc) { opt_videoname_left = std::string(argv[i + 1]); opt_videoname_right = std::string(argv[i + 2]); - } else if (std::string(argv[i]) == "--tracker" && i + 2 < argc) { + } + else if (std::string(argv[i]) == "--tracker" && i + 2 < argc) { opt_tracker1 = atoi(argv[i + 1]); opt_tracker2 = atoi(argv[i + 2]); - } else if (std::string(argv[i]) == "--help") { + } + else if (std::string(argv[i]) == "--help") { std::cout << "\nUsage: " << argv[0] - << " [--name