diff --git a/.gitignore b/.gitignore
index 9ea395f..a1756ee 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,5 @@ cmake_install.cmake
install_manifest.txt
compile_commands.json
CTestTestfile.cmake
+CMakeLists.txt.user
+build
diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..3f28c54
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,88 @@
+CMAKE_MINIMUM_REQUIRED(VERSION 3.8.0)
+
+PROJECT(autocalib-sensor-extrinsics)
+
+SET(CMAKE_CXX_STANDARD 17)
+
+SET(CMAKE_EXPORT_COMPILE_COMMANDS ON)
+
+IF(NOT CMAKE_BUILD_TYPE)
+ SET(CMAKE_BUILD_TYPE Debug)
+ENDIF()
+
+# Set the maximum level of warnings:
+SET(EXTRA_WARNINGS OFF CACHE BOOL "Enable extra warnings apart from -Wall")
+MARK_AS_ADVANCED(EXTRA_WARNINGS)
+
+# Shared options between GCC and CLANG:
+# ======================================
+IF (${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang" OR CMAKE_COMPILER_IS_GNUCXX)
+ IF(CMAKE_BUILD_TYPE MATCHES "Debug")
+ SET(EXTRA_CPP_FLAGS "${EXTRA_CPP_FLAGS} -g")
+ ADD_DEFINITIONS( -D_DEBUG)
+ ADD_DEFINITIONS( -DDEBUG)
+ ENDIF(CMAKE_BUILD_TYPE MATCHES "Debug")
+
+ # Whole program optimization
+ IF(WHOLE_PROGRAM_OPTIMIZATION)
+ SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --combine")
+ SET(EXES_CXX_FLAGS "${EXES_CXX_FLAGS} -fwhole-program --combine")
+ ENDIF(WHOLE_PROGRAM_OPTIMIZATION)
+
+ # "-mtune=native" generates code optimized for the detected current processor.
+ IF (WIN32 OR "${CMAKE_ARCH}" MATCHES ".*86" OR "${CMAKE_ARCH}" MATCHES "amd64")
+ SET(opt_native_def ON)
+ ENDIF (WIN32 OR "${CMAKE_ARCH}" MATCHES ".*86" OR "${CMAKE_ARCH}" MATCHES "amd64")
+
+ SET( OPTIMIZE_NATIVE ${opt_native_def} CACHE BOOL "GCC/clang optimizations for current processor (-mtune=native). Requires GCC 4.2+")
+
+ IF(CMAKE_BUILD_TYPE STREQUAL "Debug") # Not in debug!
+ SET(OPTIMIZE_NATIVE OFF)
+ ENDIF(CMAKE_BUILD_TYPE STREQUAL "Debug")
+
+ # "-ffast-math"
+ SET( OPTIMIZE_FFAST-MATH OFF CACHE BOOL "GCC/clang optimization for floating math (-ffast-math).")
+ IF(OPTIMIZE_FFAST-MATH)
+ SET(EXTRA_CPP_FLAGS "${EXTRA_CPP_FLAGS} -ffast-math")
+ ENDIF(OPTIMIZE_FFAST-MATH)
+
+ SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${EXTRA_CPP_FLAGS}")
+ENDIF ()
+
+# Project dependencies
+# ======================================
+INCLUDE(cmake_modules/script_MRPT.cmake REQUIRED)
+
+# OpenCV is required to process images from regular cameras
+FIND_PACKAGE(OpenCV REQUIRED)
+INCLUDE_DIRECTORIES(${OpenCV_INCLUDE_DIRS})
+LINK_DIRECTORIES(${OpenCV_LIBS_DIR})
+
+FIND_PACKAGE(VTK REQUIRED)
+
+# PCL is required to segment 3D planes from depth images
+FIND_PACKAGE(PCL REQUIRED)
+INCLUDE_DIRECTORIES(${PCL_INCLUDE_DIRS})
+LINK_DIRECTORIES(${PCL_LIBRARY_DIRS})
+#ADD_DEFINITIONS(${PCL_DEFINITIONS})
+
+# BOOST is required for the unit tests
+FIND_PACKAGE(Boost 1.46.0 REQUIRED system filesystem unit_test_framework serialization)
+
+# Qt5 GUI library
+FIND_PACKAGE(Qt5 COMPONENTS Widgets REQUIRED)
+
+# Set the path to this project's sources
+ADD_DEFINITIONS(-DPROJECT_SOURCE_PATH="${PROJECT_SOURCE_DIR}")
+
+INCLUDE_DIRECTORIES(${PROJECT_SOURCE_DIR}/core)
+INCLUDE_DIRECTORIES(${PROJECT_SOURCE_DIR}/gui)
+
+ADD_SUBDIRECTORY(gui)
+ADD_SUBDIRECTORY(core)
+
+#SET( BUILD_EXAMPLES ON CACHE BOOL "Build examples programs to show functions usage")
+#ADD_SUBDIRECTORY(examples)
+
+#SET( BUILD_TESTS ON CACHE BOOL "Build tests")
+#ADD_SUBDIRECTORY(test)
diff --git a/README.md b/README.md
index 0b57eee..d6f8353 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,120 @@
-# autocalib-sensor-extrinsics
\ No newline at end of file
+# Automatic Calibration of Sensor Extrinsics
+
+## Overview
+An end-to-end application with a graphical user interface for easily calibrating the extrinsics between range and visual sensors.
+
+Specifically, the app can calibrate the extrinsics of RGB-D cameras, 3D LiDARs, and any combination between them or with RGB cameras. Automatic and target-less calibration algorithms based on plane-matching and line-matching have been integrated into the app, allowing the calibration to be performed in any generic scene setting without the need for any specific targets. The app allows visualizing the data and tuning the parameters through a GUI at each stage of the process, thereby easing calibration efforts.
+
+**Author** : [karnikram](https://github.com/karnikram)
+
+**Note** : The solvers are not yet complete enough for real-world use; contributions are welcome!
+
+
+
+
+
+
+### Includes
+* A Qt-based GUI for integrating all the steps of the calibration process - raw data inspection, selecting sensors and grouping observations, feature extraction and matching, results visualization, iterative reruns with different parameters and algorithms - all into one app.
+
+* An implementation of the calibration from planes algorithm [[1]](#references) for calibrating range sensors (RGB-D cameras and 3D LiDARs), integrated with the GUI. This includes plane extraction and matching, and least-squares minimization of the geometric cost function (sketched below).
+
+* An implementation of the calibration from lines algorithm [[2]](#references) for calibrating range sensors with RGB cameras, integrated with the GUI. This includes 2D and 3D line extraction and matching, and least-squares minimization of the geometric cost function.
+
+* Well-structured code using OOP principles, with detailed documentation to encourage contributions. The core algorithms are written independently of the GUI, so they can potentially also be run from the command line.
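+
+As a rough sketch of that cost: the rotation part of the plane-based calibration (see `CCalibFromPlanes::computeRotationResidual`) minimizes
+
+```latex
+E(R_1, \dots, R_N) = \sum_{(s, s', k)} \left\| R_s \, n_{s}^{k} - R_{s'} \, n_{s'}^{k} \right\|^{2}
+```
+
+where the sum runs over matched plane pairs, $n_{s}^{k}$ denotes the unit normal of the $k$-th matched plane as observed by sensor $s$, and $R_s$ is that sensor's rotation (the notation here is introduced for this README, not taken from the code); the translations are then estimated in a subsequent least-squares step.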
+
+
+## Installation
+
+### Dependencies
+
+* MRPT (built from master in Debug mode)
+* Qt v5.9
+* Point Cloud Library v1.8
+* OpenCV v3.2
+
+### Build
+
+```bash
+git clone https://github.com/karnikram/autocalib-sensor-extrinsics
+cd autocalib-sensor-extrinsics
+mkdir build && cd build
+cmake ..
+make
+./gui/autocalib-sensor-extrinsics
+```
+
+### Unit Tests
+
+TODO
+
+## Usage
+
+[![Demo Video](https://img.youtube.com/vi/IWMdQcNshFI/0.jpg)](https://www.youtube.com/watch?v=IWMdQcNshFI)
+
+
+### Source Reference
+
+The source files have been documented using Doxygen-style comment blocks. Running `doxygen doc/Doxyfile` from the root of the project directory generates browsable HTML documentation.
+
+
+### License
+
+The source code is released under the [new BSD license.](LICENSE)
+
+autocalib-sensor-extrinsics has been tested with Qt 5.9.5, OpenCV 3.2.0, and PCL 1.8 on Ubuntu 18.04. This is research code: expect it to change often, and any fitness for a particular purpose is disclaimed.
+
+### References
+
+1) E. Fernandez-Moral, J. Gonzalez-Jimenez, P. Rives and V. Arevalo, "Extrinsic calibration of a set of range cameras in 5 seconds without pattern", IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS), Chicago (USA), 2014. [[link]](https://hal.inria.fr/hal-01010219)
+2) A. Perez-Yus, E. Fernandez-Moral, G. Lopez-Nicolas, JJ. Guerrero and P. Rives, "Extrinsic calibration of multiple RGB-D cameras from line observations", IEEE Robotics and Automation Letters 3 (1), 273-280. DOI: 10.1109/LRA.2017.2739104 [[link]](https://hal.inria.fr/hal-01581523)
+
+Please cite the above papers if this application was useful for your research.
+
+---
+
+This project is being developed as a part of [Google Summer of Code](https://summerofcode.withgoogle.com/projects/#4592205176504320).
+
+**Organization** : [Mobile Robot Programming Toolkit](https://github.com/mrpt/mrpt)
+**Mentors** : [Eduardo Fernandez-Moral](https://github.com/EduFdez), [Jose Luis Blanco Claraco](https://github.com/jlblancoc), [Hunter Laux](https://github.com/jolting)
+
+**Weekly Progress Logs**:
+**Project discussion thread**:
+
+**Other minor contributions**: [PR #789](https://github.com/MRPT/mrpt/pull/789), [Issue #2364](https://github.com/PointCloudLibrary/pcl/issues/2364)
+
+## Roadmap
+- [x] Design wireframe for gui and setup a base gui skeleton
+- [x] Load the observations in the dataset into a treeview
+- [x] Setup dock widgets for the app setup, algorithm configuration
+- [x] Setup viewer window using qvtk and mrpt cqtglcanvas widgets
+- [x] Visualize the cloud and image of each observation in the viewers
+- [x] Synchronize the observations in the dataset to form corresponding sets
+- [x] Display the synchronized observations in a separate treeview along with reconfiguration options
+- [x] Visualize each observation set in the viewer using initial values
+- [x] Setup configuration widget for calibration from planes
+- [x] Extract planes from each observation in a set
+- [x] Visualize extracted planes and their properties in the viewer
+- [x] Perform data association between the segmented planes in each set
+- [x] Display the associated planes together in the main viewer with initial calibration values
+- [x] Setup configuration widget for calibration from lines
+- [x] Create and load config file for all the app parameters
+- [x] Extract 2D lines from images and visualize
+- [x] Extract 3D lines from range images and visualize
+- [x] Perform data association between the extracted 3D lines in each set
+- [x] Display the associated 3D lines together in the main viewer
+- [x] Integrate rotation solvers
+- [x] Integrate translation solvers
+- [x] Setup widget for editing initial calibration values loaded from config file
+- [x] Add option to switch between sensors in the viewer for multi sensor calibration
+- [ ] Unit testing for solvers
+- [ ] Integrate ransac
+- [ ] Visualize final result
+
+
+
+
+
+
+
+
diff --git a/cmake_modules/script_MRPT.cmake b/cmake_modules/script_MRPT.cmake
new file mode 100644
index 0000000..bf8af0a
--- /dev/null
+++ b/cmake_modules/script_MRPT.cmake
@@ -0,0 +1,35 @@
+
+SET(RV_HAS_MRPT 0)
+#SET(RV_HAS_MRPT_BASE 0)
+
+# Leave at the user's choice to disable this library:
+OPTION(DISABLE_MRPT "Forces NOT using MRPT, even if it is found by CMake" "OFF")
+MARK_AS_ADVANCED(DISABLE_MRPT)
+
+IF(NOT DISABLE_MRPT)
+ # MRPT library:
+ # --------------------------------------------
+ #find_package(MRPT COMPONENTS base hwdrivers obs slam maps graphslam pbmap QUIET)
+ find_package(MRPT COMPONENTS obs serialization rtti maps gui pbmap QUIET)
+ IF(MRPT_FOUND)
+
+ SET(RV_HAS_MRPT 1)
+ SET(RV_HAS_MRPT_BASE 1)
+ add_definitions(-DRV_HAS_MRPT=1)
+ #add_definitions(-D_HAS_MRPT_BASE=${RV_HAS_MRPT_BASE})
+
+ INCLUDE_DIRECTORIES(${MRPT_INCLUDE_DIRS})
+ link_directories(${MRPT_LIBRARY_DIRS})
+ add_definitions(${MRPT_DEFINITIONS})
+
+ IF($ENV{VERBOSE})
+ MESSAGE(STATUS "MRPT:")
+ MESSAGE(STATUS " Include dirs: ${MRPT_INCLUDE_DIRS}")
+ MESSAGE(STATUS " Library dirs: ${MRPT_LIBRARY_DIRS}")
+ MESSAGE(STATUS " Definitions : ${MRPT_DEFINITIONS}")
+ MESSAGE(STATUS " Libraries : ${MRPT_LIBRARIES}")
+ ENDIF($ENV{VERBOSE})
+
+ ENDIF(MRPT_FOUND)
+
+ENDIF(NOT DISABLE_MRPT)
diff --git a/config_files/app_config_checkerboard.ini b/config_files/app_config_checkerboard.ini
new file mode 100644
index 0000000..4cb7a9c
--- /dev/null
+++ b/config_files/app_config_checkerboard.ini
@@ -0,0 +1,54 @@
+# Master configuration file for all the functionality within the app.
+
+[rawlog]
+path=/home/karnik/dataset/checkerboard.rawlog
+
+[initial_calibration]
+#transformation matrices for the sensors in the rawlog
+
+RGBD_1=[1 0 0 0; 0 1 0 0; 0 0 1 0; 0 0 0 1]
+RGBD_2=[0.7071 0 0.7071 0; 0 1 0 0; -0.7071 0 0.7071 0; 0 0 0 1]
+
+[initial_uncertainty]
+#The angular and distance uncertainty of each sensor in the rawlog
+#Units are degrees and meters
+
+RGBD_1=[10 0.1]
+RGBD_2=[10 0.1]
+
+[grouping_observations]
+#maximum acceptable delay between observations in milliseconds
+max_delay=30
+
+[grouped_observations]
+#determines the number of grouped observation sets to use for calibration
+downsample_factor=1
+
+[plane_segmentation]
+#params for pcl integral normal estimation method
+normal_estimation_method=COVARIANCE_MATRIX
+depth_dependent_smoothing=true
+max_depth_change_factor=0.02
+normal_smoothing_size=10.00
+
+#params for pcl organized multiplane segmentation
+angle_threshold=4.00
+min_inliers_frac=0.001
+distance_threshold=0.05
+max_curvature=0.1
+
+#params for removing duplicate planes
+max_cos_normal=0.998
+dist_centre_plane_threshold=0.1 //min distance between center points of planes
+proximity_threshold=0.4 //min distance between the closest points of planes
+
+[line_segmentation]
+canny_low_threshold=150
+canny_high_to_low_ratio=3
+canny_kernel_size=3
+hough_threshold=50
+
+[solver]
+max_iters=10
+min_update=0.00001
+convergence_error=0.00001
diff --git a/config_files/app_config_livingroom.ini b/config_files/app_config_livingroom.ini
new file mode 100755
index 0000000..fd1ed4d
--- /dev/null
+++ b/config_files/app_config_livingroom.ini
@@ -0,0 +1,60 @@
+# Master configuration file for all the functionality within the app.
+
+[rawlog]
+path=/home/karnik/dataset/livingroom.rawlog
+
+[initial_calibration]
+#transformation matrices for the sensors in the rawlog
+
+RGBD_1=[0.70711 0 0.70711 0.27100; -0.70711 0 0.70711 -0.03500; 0 -1 0 1.0150; 0 0 0 1]
+RGBD_2=[0 0 1 0.2400; -1 0 0 -0.0490; 0 -1 0 1.0150; 0 0 0 1]
+RGBD_3=[0.70711 0 -0.70711 0.27100; 0.70711 0 0.70711 0.02700; 0 -1 0 1.0150; 0 0 0 1]
+RGBD_4=[1 0 0 0.28500; 0 0 1 -0.0400; 0 -1 0 1.0150; 0 0 0 1]
+
+[initial_uncertainty]
+
+#The initial angular and distance uncertainty of each sensor in the rawlog
+#Units are degrees and meters
+
+RGBD_1=[10 0.1]
+RGBD_2=[10 0.1]
+RGBD_3=[10 0.1]
+RGBD_4=[10 0.1]
+
+[grouping_observations]
+#maximum acceptable delay between observations in milliseconds
+max_delay=30
+
+[grouped_observations]
+#determines the number of grouped observation sets to use for calibration
+downsample_factor=1
+
+[plane_segmentation]
+#params for pcl integral normal estimation method
+normal_estimation_method=COVARIANCE_MATRIX
+depth_dependent_smoothing=true
+max_depth_change_factor=0.02
+normal_smoothing_size=10.00
+
+#params for pcl organized multiplane segmentation
+angle_threshold=4.00
+min_inliers_frac=0.001
+distance_threshold=0.05
+max_curvature=0.1
+
+#params for removing duplicate planes
+max_cos_normal=0.998
+dist_centre_plane_threshold=0.1 //min distance between center points of planes
+proximity_threshold=0.4 //min distance between the closest points of planes
+
+[line_segmentation]
+canny_low_threshold=150
+canny_high_to_low_ratio=3
+canny_kernel_size=3
+hough_threshold=50
+
+[solver]
+max_iters=10
+min_update=0.00001
+convergence_error=0.00001
+
diff --git a/core/CLine.h b/core/CLine.h
new file mode 100644
index 0000000..0775aa1
--- /dev/null
+++ b/core/CLine.h
@@ -0,0 +1,45 @@
+/* +---------------------------------------------------------------------------+
+ | Mobile Robot Programming Toolkit (MRPT) |
+ | http://www.mrpt.org/ |
+ | |
+ | Copyright (c) 2005-2018, Individual contributors, see AUTHORS file |
+ | See: http://www.mrpt.org/Authors - All rights reserved. |
+ | Released under BSD License. See details in http://www.mrpt.org/License |
+ +---------------------------------------------------------------------------+ */
+
+#pragma once
+
+#include <array>
+#include <Eigen/Core>
+
+typedef float Scalar;
+
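+/* A note on conventions (a summary of how CCalibFromLines::segmentLines fills this struct): the 2D
+   line is stored both in Hough normal form (rho = x*cos(theta) + y*sin(theta)) and in slope-intercept
+   form (y = m*x + c), while the 3D members (end_points3D, ray, normal, p, v) are expressed in the
+   depth sensor's frame. */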
+/** Represents the extracted line along with its 2D and 3D geometrical characteristics. */
+
+class CLine
+{
+public:
+
+ Scalar rho;
+ Scalar theta;
+
+ Scalar m;
+ Scalar c;
+
+	std::array<Eigen::Vector2i, 2> end_points;
+	std::array<Eigen::Vector3f, 2> end_points3D;
+ Eigen::Vector2i mean_point;
+
+ /** The 2D direction vector. [lx ly 0] */
+ Eigen::Vector3i l;
+
+ /** Equation of the 3D ray passing through the mean_point from the camera centre. */
+ Eigen::Vector3f ray;
+ /** Equation of the 3D normal of the projective plane. */
+ Eigen::Vector3f normal;
+
+ /** 3D coordinates of the mean_point. */
+ Eigen::Vector3f p;
+ /** 3D direction vector of the line. */
+ Eigen::Vector3f v;
+};
diff --git a/core/CMakeLists.txt b/core/CMakeLists.txt
new file mode 100755
index 0000000..50ee8ea
--- /dev/null
+++ b/core/CMakeLists.txt
@@ -0,0 +1,37 @@
+INCLUDE_DIRECTORIES(${MRPT_INCLUDE_DIR})
+INCLUDE_DIRECTORIES(${OpenCV_INCLUDE_DIRS})
+INCLUDE_DIRECTORIES(${PCL_INCLUDE_DIRS})
+
+# List the source files of CORE
+SET(SRC
+ CObservationTree.h
+ CObservationTreeItem.h
+ Utils.h
+ CPlane.h
+ CLine.h
+ correspondences.h
+ solver.h
+ calib_solvers/CExtrinsicCalib.h
+ calib_solvers/CCalibFromPlanes.h
+ calib_solvers/CCalibFromLines.h
+ calib_solvers/TCalibFromPlanesParams.h
+ calib_solvers/TCalibFromLinesParams.h
+ calib_solvers/TExtrinsicCalibParams.h
+ calib_solvers/TSolverResult.h
+
+ CObservationTree.cpp
+ CObservationTreeItem.cpp
+ correspondences.cpp
+ solver.cpp
+ calib_solvers/CExtrinsicCalib.cpp
+ calib_solvers/CCalibFromPlanes.cpp
+ calib_solvers/CCalibFromLines.cpp
+)
+
+# CORE library encapsulates the methods and types for the calibration algorithms
+ADD_LIBRARY(core ${SRC})
+TARGET_LINK_LIBRARIES(core ${MRPT_LIBS} ${OpenCV_LIBS} ${PCL_LIBRARIES}) #${Boost_SERIALIZATION_LIBRARY}
+
+# Tell CMake that the linker language is C++
+SET_TARGET_PROPERTIES(core PROPERTIES LINKER_LANGUAGE CXX)
+#TARGET_INCLUDE_DIRECTORIES(core PUBLIC ${PROJECT_SOURCE_DIR}/build)
diff --git a/core/CObservationTree.cpp b/core/CObservationTree.cpp
new file mode 100644
index 0000000..53a9a02
--- /dev/null
+++ b/core/CObservationTree.cpp
@@ -0,0 +1,293 @@
+#include "CObservationTree.h"
+
+#include <mrpt/io/CFileGZInputStream.h>
+#include <mrpt/serialization/CArchive.h>
+#include <mrpt/obs/CObservation.h>
+#include <mrpt/system/datetime.h>
+
+using namespace mrpt::io;
+using namespace mrpt::serialization;
+using namespace mrpt::obs;
+
+CObservationTree::CObservationTree(const std::string &rawlog_path, const mrpt::config::CConfigFile &config_file)
+{
+ m_rootitem = new CObservationTreeItem("root");
+ m_rawlog_path = rawlog_path;
+ m_config_file = config_file;
+ m_synced = false;
+}
+
+CObservationTree::~CObservationTree()
+{
+ delete m_rootitem;
+}
+
+void CObservationTree::loadTree()
+{
+ CFileGZInputStream rawlog(m_rawlog_path);
+ CSerializable::Ptr obj;
+
+ bool read = true;
+ std::string sensor_label, obs_label;
+
+ while(read)
+ {
+ try
+ {
+ archiveFrom(rawlog) >> obj;
+
+			CObservation::Ptr obs = std::dynamic_pointer_cast<CObservation>(obj);
+ m_obs_count++;
+
+ sensor_label = obs->sensorLabel;
+ obs_label = sensor_label + " : " + (obs->GetRuntimeClass()->className);
+
+ if(m_obs_count == 1)
+ {
+ m_sensor_labels.push_back(sensor_label);
+ m_count_of_label.push_back(1);
+ }
+
+ else
+ {
+ auto iter = std::find(m_sensor_labels.begin(), m_sensor_labels.end(), sensor_label);
+ if(iter == m_sensor_labels.end())
+ {
+ m_sensor_labels.push_back(sensor_label);
+ m_count_of_label.push_back(1);
+ }
+
+ else
+ m_count_of_label[utils::findItemIndexIn(m_sensor_labels, sensor_label)]++;
+ }
+
+ m_rootitem->appendChild(new CObservationTreeItem("[#" + std::to_string(m_obs_count - 1) + "] " + obs_label, obs, m_rootitem));
+ }
+
+ catch(std::exception &e)
+ {
+ read = false;
+ }
+ }
+
+ Eigen::Matrix4f rt;
+ Eigen::Vector2f uncertain;
+ for(size_t i = 0; i < m_sensor_labels.size(); i++)
+ {
+ m_config_file.read_matrix("initial_calibration", m_sensor_labels[i], rt, Eigen::Matrix4f(), true);
+ m_config_file.read_vector("initial_uncertainty", m_sensor_labels[i], Eigen::Vector2f(), uncertain, true);
+ m_sensor_poses.push_back(rt);
+ m_sensor_pose_uncertainties.push_back(uncertain);
+ }
+}
+
+void CObservationTree::syncObservations(const std::vector<std::string> &selected_sensor_labels, const int &max_delay)
+{
+ CObservationTreeItem *new_rootitem = new CObservationTreeItem("root");
+ CObservation::Ptr curr_obs, next_obs;
+ std::string curr_sensor_label, obs_label;
+	std::vector<std::string> sensor_labels_in_set;
+ size_t obs_sets_count = 0;
+	std::pair<CObservation::Ptr, int> curr_obs_with_model_id;
+	std::vector<std::pair<CObservation::Ptr, int>> obs_set;
+ mrpt::system::TTimeStamp curr_ts, next_ts, set_ts;
+ double delay;
+
+	std::vector<std::vector<int>> sync_indices_tmp;
+
+ sync_indices_tmp.resize(selected_sensor_labels.size());
+ m_sync_indices.resize(selected_sensor_labels.size());
+
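+	// Walk the original tree in order, collecting at most one observation per selected sensor into a
+	// candidate set. When a sensor repeats or max_delay is exceeded, the candidate is flushed (kept
+	// only if every selected sensor contributed an observation) and the scan backtracks so that the
+	// observations following the start of the flushed candidate are considered again.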
+ for(size_t i = 0; i < m_rootitem->childCount(); i++)
+ {
+		curr_obs = std::dynamic_pointer_cast<CObservation>(m_rootitem->child(i)->getObservation());
+ curr_sensor_label = curr_obs->sensorLabel;
+ curr_ts = curr_obs->timestamp;
+ curr_obs_with_model_id.first = curr_obs;
+ curr_obs_with_model_id.second = i;
+
+ auto iter = std::find(selected_sensor_labels.begin(), selected_sensor_labels.end(), curr_sensor_label);
+
+ if(iter != selected_sensor_labels.end())
+ {
+ if(obs_set.size() == 0)
+ {
+ sensor_labels_in_set.push_back(curr_sensor_label);
+ obs_set.push_back(curr_obs_with_model_id);
+ set_ts = curr_ts;
+ continue;
+ }
+
+ else
+ {
+ iter = std::find(sensor_labels_in_set.begin(), sensor_labels_in_set.end(), curr_sensor_label);
+ delay = mrpt::system::timeDifference(set_ts, curr_ts);
+ if(iter == sensor_labels_in_set.end() && (delay <= max_delay))
+ {
+ sensor_labels_in_set.push_back(curr_sensor_label);
+ obs_set.push_back(curr_obs_with_model_id);
+ }
+
+ else
+ {
+ if(sensor_labels_in_set.size() == selected_sensor_labels.size())
+ {
+ new_rootitem->appendChild(new CObservationTreeItem("Observations set #" + std::to_string(obs_sets_count++), 0, new_rootitem));
+
+ auto iter2 = obs_set.begin();
+ for(iter = sensor_labels_in_set.begin(); (iter < sensor_labels_in_set.end()) && (iter2 < obs_set.end()); iter++, iter2++)
+ {
+ obs_label = "[#" + std::to_string((*iter2).second) + "] "
+ + (*iter) + " : " + (*iter2).first->GetRuntimeClass()->className;
+ new_rootitem->child(new_rootitem->childCount() - 1)->appendChild(new CObservationTreeItem(obs_label, (*iter2).first,
+ new_rootitem->child(new_rootitem->childCount() - 1), (*iter2).second));
+ sync_indices_tmp[utils::findItemIndexIn(selected_sensor_labels, *iter)].push_back((*iter2).second);
+ }
+ }
+
+ i -= sensor_labels_in_set.size();
+ sensor_labels_in_set.clear();
+ obs_set.clear();
+ }
+ }
+ }
+ }
+
+ // inserting left over set
+ if(obs_set.size() > 0 && sensor_labels_in_set.size() == selected_sensor_labels.size())
+ {
+ new_rootitem->appendChild(new CObservationTreeItem("Observations set #" + std::to_string(obs_sets_count++), 0, new_rootitem));
+
+ auto iter2 = obs_set.begin();
+ for(auto iter = sensor_labels_in_set.begin(); (iter < sensor_labels_in_set.end()) && (iter2 < obs_set.end()); iter++, iter2++)
+ {
+ obs_label = "[#" + std::to_string((*iter2).second) + "] "
+ + (*iter) + " : " + (*iter2).first->GetRuntimeClass()->className;
+ new_rootitem->child(new_rootitem->childCount() - 1)->appendChild(new CObservationTreeItem(obs_label, (*iter2).first,
+ new_rootitem->child(new_rootitem->childCount() - 1), (*iter2).second));
+ sync_indices_tmp[utils::findItemIndexIn(selected_sensor_labels, *iter)].push_back((*iter2).second);
+ }
+
+ sensor_labels_in_set.clear();
+ obs_set.clear();
+ }
+
+ // removing any duplicate entries
+ for(size_t i = 0; i < sync_indices_tmp.size(); i++)
+ {
+ for(size_t j = 0; j < sync_indices_tmp[i].size(); j++)
+ {
+ if(j > 0)
+ {
+ auto iter = std::find(m_sync_indices[i].begin(), m_sync_indices[i].end(), sync_indices_tmp[i][j]);
+ if(iter == m_sync_indices[i].end())
+ m_sync_indices[i].push_back(sync_indices_tmp[i][j]);
+ }
+
+ else
+ m_sync_indices[i].push_back(sync_indices_tmp[i][j]);
+ }
+ }
+
+ m_rootitem = new_rootitem;
+ m_sensor_labels = selected_sensor_labels;
+ m_synced = true;
+ m_sync_offset = max_delay;
+
+ Eigen::Matrix4f rt;
+ Eigen::Vector2f uncertain;
+ for(size_t i = 0; i < m_sensor_labels.size(); i++)
+ {
+ m_config_file.read_matrix("initial_calibration", m_sensor_labels[i], rt, Eigen::Matrix4f(), true);
+ m_config_file.read_vector("initial_uncertainty", m_sensor_labels[i], Eigen::Vector2f(), uncertain, true);
+ m_sensor_poses.push_back(rt);
+ m_sensor_pose_uncertainties.push_back(uncertain);
+ }
+}
+
+std::string CObservationTree::getRawlogPath() const
+{
+ return this->m_rawlog_path;
+}
+
+CObservationTreeItem *CObservationTree::getRootItem() const
+{
+ return this->m_rootitem;
+}
+
+int CObservationTree::getObsCount() const
+{
+ return this->m_obs_count;
+}
+
+int CObservationTree::getNumberOfSensors() const
+{
+ return this->m_sensor_labels.size();
+}
+
+std::vector<std::string> CObservationTree::getSensorLabels() const
+{
+ return this->m_sensor_labels;
+}
+
+std::vector<int> CObservationTree::getCountOfLabel() const
+{
+ return this->m_count_of_label;
+}
+
+std::vector<Eigen::Matrix4f> CObservationTree::getSensorPoses() const
+{
+ return this->m_sensor_poses;
+}
+
+std::vector<Eigen::Vector2f> CObservationTree::getSensorUncertainties() const
+{
+ return this->m_sensor_pose_uncertainties;
+}
+
+void CObservationTree::setSensorPose(const Eigen::Matrix4f &sensor_pose, const int &sensor_index)
+{
+ this->m_sensor_poses[sensor_index] = sensor_pose;
+}
+
+bool CObservationTree::setSensorPoses(const std::vector<Eigen::Matrix4f> &sensor_poses)
+{
+	if(sensor_poses.size() != this->m_sensor_labels.size())
+		return false;
+
+	this->m_sensor_poses = sensor_poses;
+	return true;
+}
+
+void CObservationTree::setSensorUncertainty(const Eigen::Vector2f &sensor_uncertainty, const int &sensor_index)
+{
+ this->m_sensor_pose_uncertainties[sensor_index] = sensor_uncertainty;
+}
+
+bool CObservationTree::setSensorUncertainties(const std::vector<Eigen::Vector2f> &uncertainties)
+{
+	if(uncertainties.size() != this->m_sensor_pose_uncertainties.size())
+		return false;
+
+	this->m_sensor_pose_uncertainties = uncertainties;
+	return true;
+}
+
+std::vector<std::vector<int>> CObservationTree::getSyncIndices() const
+{
+ return this->m_sync_indices;
+}
+
+int CObservationTree::findSyncIndexFromSet(const int &set_id, const std::string &sensor_label) const
+{
+ if(!m_synced)
+ return -1;
+
+ CObservationTreeItem *item = m_rootitem->child(set_id);
+ size_t sensor_index = utils::findItemIndexIn(m_sensor_labels, sensor_label);
+
+ for(size_t i = 0; i < item->childCount(); i++)
+ {
+ if(item->child(i)->getObservation()->sensorLabel == sensor_label)
+ {
+ return utils::findItemIndexIn(m_sync_indices[sensor_index], item->child(i)->getPriorIndex());
+ }
+	}
+
+	return -1;
+}
diff --git a/core/CObservationTree.h b/core/CObservationTree.h
new file mode 100644
index 0000000..9ca13ed
--- /dev/null
+++ b/core/CObservationTree.h
@@ -0,0 +1,136 @@
+#pragma once
+
+#include "Utils.h"
+#include "CObservationTreeItem.h"
+#include <mrpt/config/CConfigFile.h>
+
+#include <Eigen/Dense>
+#include <string>
+#include <vector>
+
+/**
+ * Class for loading, storing, and synchronizing the observations from a rawlog file into a tree.
+ * The class also maintains the relative transformations between all the sensors.
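+ *
+ * A minimal usage sketch (the rawlog path, config file and sensor labels below are hypothetical):
+ * \code
+ * mrpt::config::CConfigFile config("app_config.ini");
+ * CObservationTree tree("dataset.rawlog", config);
+ * tree.loadTree();
+ * tree.syncObservations({"RGBD_1", "RGBD_2"}, 30); // group observations at most 30 ms apart
+ * \endcode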
+ */
+
+class CObservationTree
+{
+ public:
+
+ /**
+ * \brief Constructor
+ * \param rawlog_path the path of the rawlog file to load observations from.
+ * \param config_file the master app configuration file.
+ */
+ CObservationTree(const std::string &rawlog_path, const mrpt::config::CConfigFile &config_file);
+
+ /**
+ * \brief Destructor
+ */
+ ~CObservationTree();
+
+ /**
+ * \brief loadTree loads the contents of the rawlog into the tree.
+ * \param rawlog_filename the rawlog file.
+ */
+ void loadTree();
+
+ /**
+ * \brief Returns the path of the rawlog file this model was loaded from.
+ */
+ std::string getRawlogPath() const;
+
+ /**
+ * Returns the root item of the tree.
+ */
+ CObservationTreeItem *getRootItem() const;
+
+ /** Returns the count of total number of observations found in the rawlog. */
+ int getObsCount() const;
+
+ /** Returns the number of unique sensors found in the rawlog. */
+ int getNumberOfSensors() const;
+
+ /** Returns a list of the unique sensor labels found in the rawlog. */
+		std::vector<std::string> getSensorLabels() const;
+
+		/** Returns the count of observations of each label found in the rawlog. */
+		std::vector<int> getCountOfLabel() const;
+
+		/** Returns the poses of the sensors found in the rawlog. */
+		std::vector<Eigen::Matrix4f> getSensorPoses() const;
+
+		/** Returns the uncertainties of the sensors found in the rawlog. */
+		std::vector<Eigen::Vector2f> getSensorUncertainties() const;
+
+ /** Sets the poses of the sensors found in the rawlog.
+ * \param sensor_poses the new sensor poses (size should match the number of sensors in the model).
+ * \return true or false depending on whether the poses were set.
+ */
+		bool setSensorPoses(const std::vector<Eigen::Matrix4f> &sensor_poses);
+
+ /** Sets the pose of an individual sensor in the rawlog.
+ * \param sensor_pose the pose of the sensor.
+ * \param sensor_index the index of the sensor in the rawlog.
+ */
+ void setSensorPose(const Eigen::Matrix4f &sensor_pose, const int &sensor_index);
+
+ /** Sets the uncertainties of the sensors found in the rawlog.
+		 * \param uncertainties The new uncertainties of the sensors (size should match the number of sensors in the model).
+ * \return bool depending on whether the uncertainties were set.
+ */
+		bool setSensorUncertainties(const std::vector<Eigen::Vector2f> &uncertainties);
+
+ /** Set the distance and angular uncertainty of an individual sensor in the rawlog.
+ * \param sensor_uncertainty The distance and angular uncertainty.
+ * \param sensor_index The index of the sensor in the rawlog.
+ */
+ void setSensorUncertainty(const Eigen::Vector2f &sensor_uncertainty, const int &sensor_index);
+
+ /** Groups observations together based on their time stamp proximity.
+ * Stores the results back in the same tree.
+		 * \param selected_sensor_labels the labels of the sensors that are to be considered for grouping.
+ * \param max_delay Maximum allowable delay between observations.
+ */
+		void syncObservations(const std::vector<std::string> &selected_sensor_labels, const int &max_delay);
+
+ /** Returns the indices of the grouped observations with respect to the original tree, grouped by sensor. */
+		std::vector<std::vector<int>> getSyncIndices() const;
+
+		/** Returns the sync index (the index in m_sync_indices[sensor_id]) of an item within a set, identified by its sensor label. */
+ int findSyncIndexFromSet(const int &set_id, const std::string &sensor_label) const;
+
+ protected:
+
+ /** The path of the file the rawlog was loaded from. */
+ std::string m_rawlog_path;
+
+ mrpt::config::CConfigFile m_config_file;
+
+ /** The root parent item from which all the other observations originate. */
+ CObservationTreeItem *m_rootitem;
+
+ /** The total number of observations loaded from the rawlog. */
+ int m_obs_count = 0;
+
+ /** The unique sensor labels found in the rawlog. */
+		std::vector<std::string> m_sensor_labels;
+
+ /** store count of observations of each observation type. */
+		std::vector<int> m_count_of_label;
+
+ /** bool to indicate whether the elements in the tree have been synchronized or not. */
+ bool m_synced;
+
+ /** m_sync_indices indices of the grouped (synchronized) observations with respect to the original tree, per sensor. */
+		std::vector<std::vector<int>> m_sync_indices;
+
+ /** the maximum allowable delay between observation items before they can be synced. */
+ int m_sync_offset = -1;
+
+ /** The [R,t] poses of the sensors found in the rawlog. */
+		std::vector<Eigen::Matrix4f> m_sensor_poses;
+
+ /** The initial angular and distance uncertainties in the sensor poses. */
+		std::vector<Eigen::Vector2f> m_sensor_pose_uncertainties;
+};
diff --git a/core/CObservationTreeItem.cpp b/core/CObservationTreeItem.cpp
new file mode 100644
index 0000000..a5f5d7e
--- /dev/null
+++ b/core/CObservationTreeItem.cpp
@@ -0,0 +1,84 @@
+#include "CObservationTreeItem.h"
+#include <algorithm>
+
+using namespace mrpt::obs;
+
+CObservationTreeItem::CObservationTreeItem(const std::string &id, const CObservation::Ptr observation, CObservationTreeItem *parent, int prior_index)
+{
+ m_parentitem = parent;
+ m_id = id;
+ m_observation = observation;
+ m_prior_index = prior_index;
+}
+
+CObservationTreeItem::~CObservationTreeItem()
+{
+ for(auto iter = m_childitems.begin(); iter != m_childitems.end(); iter++)
+ {
+ delete (*iter);
+ }
+
+ m_childitems.clear();
+}
+
+void CObservationTreeItem::appendChild(CObservationTreeItem *item)
+{
+ m_childitems.push_back(item);
+}
+
+CObservationTreeItem *CObservationTreeItem::child(int row) const
+{
+ return m_childitems.at(row);
+}
+
+int CObservationTreeItem::childCount() const
+{
+ return m_childitems.size();
+}
+
+int CObservationTreeItem::row() const
+{
+ if(m_parentitem)
+ {
+		auto iter = std::find(m_parentitem->m_childitems.begin(), m_parentitem->m_childitems.end(), const_cast<CObservationTreeItem*>(this));
+ if(iter != m_parentitem->m_childitems.end())
+ return std::distance(m_parentitem->m_childitems.begin(), iter);
+ }
+
+ return 0;
+}
+
+CObservationTreeItem *CObservationTreeItem::parentItem() const
+{
+ return this->m_parentitem;
+}
+
+std::string CObservationTreeItem::itemId() const
+{
+ return this->m_id;
+}
+
+mrpt::system::TTimeStamp CObservationTreeItem::getTimeStamp() const
+{
+ return this->m_observation->timestamp;
+}
+
+CObservation::Ptr CObservationTreeItem::getObservation() const
+{
+ return this->m_observation;
+}
+
+int CObservationTreeItem::getPriorIndex() const
+{
+ return this->m_prior_index;
+}
+
+pcl::PointCloud<pcl::PointXYZRGBA>::Ptr CObservationTreeItem::cloud()
+{
+ return this->m_cloud;
+}
+
+void CObservationTreeItem::setCloud(pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud)
+{
+ this->m_cloud = cloud;
+}
diff --git a/core/CObservationTreeItem.h b/core/CObservationTreeItem.h
new file mode 100644
index 0000000..3311e9c
--- /dev/null
+++ b/core/CObservationTreeItem.h
@@ -0,0 +1,86 @@
+#pragma once
+
+#include <mrpt/obs/CObservation.h>
+#include <mrpt/system/datetime.h>
+#include <pcl/point_cloud.h>
+#include <pcl/point_types.h>
+#include <string>
+#include <vector>
+
+/**
+ * Defines the type of each item that
+ * is stored internally in CObservationTree.
+ */
+
+class CObservationTreeItem
+{
+ public:
+
+ /**
+ * Constructor
+ * \param id the item string id.
+	 * \param observation the observation to be contained in the item.
+ * \param parentItem the parent of the item.
+ * \param prior_index the index of the item with respect to the parent in its previous tree, if any.
+ */
+ CObservationTreeItem(const std::string &id, const mrpt::obs::CObservation::Ptr observation = 0, CObservationTreeItem *parentItem = 0,
+ int prior_index = -1);
+
+ ~CObservationTreeItem();
+
+ void appendChild(CObservationTreeItem *child);
+
+ /** Returns the item string id. */
+ std::string itemId() const;
+
+		/** Returns a pointer to the contained observation item. */
+ mrpt::obs::CObservation::Ptr getObservation() const;
+
+ /** Returns the timestamp of the observation contained in the item. */
+ mrpt::system::TTimeStamp getTimeStamp() const;
+
+ /** Returns a pointer to the contained child item, at the specified index, if any. */
+ CObservationTreeItem *child(int row) const;
+
+ /** Returns the number of child items the item contains. */
+ int childCount() const;
+
+ /** Returns the row count of the item within its parent. */
+ int row() const;
+
+ /** Returns the parent item. */
+ CObservationTreeItem *parentItem() const;
+
+ /** Returns the index of the item with respect to its prior tree. */
+ int getPriorIndex() const;
+
+ /** Pointer to the cloud loaded and saved from the observation, for quicker access. */
+		pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud();
+
+ /** Save pointer to the loaded cloud for later access. */
+		void setCloud(pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud);
+
+ private:
+
+ /** List of child items the item contains. */
+ std::vector m_childitems;
+
+ /** Pointer to the parent of the tree item. */
+ CObservationTreeItem *m_parentitem;
+
+ /** The item identifier string.
+ * Can be of two types depending on whether the item represents an observation or a set item.
+ * 1) [#item_index] sensor_id : class_name
+ * 2) Observation set #set_index
+ */
+ std::string m_id;
+
+ /** The index of the item with respect to the previous tree it was a part of, if any.
+ * For example, the index with respect to the root item in the tree the item belonged to before it was synchronized.
+ */
+ int m_prior_index;
+
+ /** Pointer to the observation contained in the item. */
+ mrpt::obs::CObservation::Ptr m_observation;
+
+ /** Pointer to the cloud loaded and saved from the observation, for quicker access. */
+		pcl::PointCloud<pcl::PointXYZRGBA>::Ptr m_cloud = nullptr;
+};
diff --git a/core/CPlane.h b/core/CPlane.h
new file mode 100755
index 0000000..30baf2f
--- /dev/null
+++ b/core/CPlane.h
@@ -0,0 +1,35 @@
+/* +---------------------------------------------------------------------------+
+ | Mobile Robot Programming Toolkit (MRPT) |
+ | http://www.mrpt.org/ |
+ | |
+ | Copyright (c) 2005-2018, Individual contributors, see AUTHORS file |
+ | See: http://www.mrpt.org/Authors - All rights reserved. |
+ | Released under BSD License. See details in http://www.mrpt.org/License |
+ +---------------------------------------------------------------------------+ */
+#pragma once
+
+#include <vector>
+#include <Eigen/Core>
+#include <pcl/point_cloud.h>
+#include <pcl/point_types.h>
+
+typedef float Scalar;
+
+/** Store the plane extracted from a depth image (or point cloud) defined by some geometric characteristics. */
+class CPlane
+{
+ public:
+ EIGEN_MAKE_ALIGNED_OPERATOR_NEW
+
+		Eigen::Matrix<Scalar, 3, 1> v3center;
+		Eigen::Matrix<Scalar, 3, 1> v3normal;
+ Scalar d;
+};
+
+/** Store the plane's geometric characteristics and its convex hull. */
+class CPlaneCHull : public CPlane
+{
+ public:
+		pcl::PointCloud<pcl::PointXYZRGBA>::Ptr ConvexHullPtr;
+		std::vector<int> v_hull_indices;
+		std::vector<int> v_inliers;
+};
diff --git a/core/Utils.h b/core/Utils.h
new file mode 100644
index 0000000..e931684
--- /dev/null
+++ b/core/Utils.h
@@ -0,0 +1,93 @@
+#pragma once
+
+#include <algorithm>
+#include <cmath>
+#include <vector>
+#include <Eigen/Dense>
+#include <mrpt/poses/CPose3D.h>
+#include <mrpt/img/TCamera.h>
+
+namespace utils
+{
+	// Define some colours to draw blobs, patches, etc.
+ namespace colors
+ {
+ static const unsigned char red [10] = {255, 0, 0, 255, 255, 0, 255, 204, 0, 255};
+ static const unsigned char grn [10] = { 0, 255, 0, 255, 0, 255, 160, 51, 128, 222};
+ static const unsigned char blu [10] = { 0, 0, 255, 0, 255, 255, 0 , 204, 0, 173};
+ }
+
+ /** Function template to find the position of an element in a vector. */
+	template <typename T>
+	size_t findItemIndexIn(const std::vector<T> &vec, const T &item)
+ {
+ auto iter = std::find(vec.begin(), vec.end(), item);
+ if(iter != vec.end())
+ return std::distance(vec.begin(), iter);
+ else
+ return -1;
+ }
+
+ /** Function template to get rotation angles from SE(3) transformation. */
+	template <typename Scalar>
+	Eigen::Matrix<Scalar, 3, 1> getRotations(const Eigen::Matrix<Scalar, 4, 4> &sensor_pose)
+	{
+		Eigen::Matrix<Scalar, 3, 1> rot_vec;
+ mrpt::math::CMatrixDouble44 mrpt_mat(sensor_pose);
+ mrpt::poses::CPose3D mrpt_pose(mrpt_mat);
+ rot_vec[0] = (mrpt_pose.roll() * 180) / M_PI;
+ rot_vec[1] = (mrpt_pose.pitch() * 180) / M_PI;
+ rot_vec[2] = (mrpt_pose.yaw() * 180) / M_PI;
+ return rot_vec;
+ }
+
+ /** Function template to convert rotation angles to SO(3). */
+	template <typename Scalar>
+	Eigen::Matrix<Scalar, 3, 3> getRotationMatrix(const Eigen::Matrix<Scalar, 3, 1> &angles)
+	{
+		Eigen::Matrix<Scalar, 3, 3> r, rx, ry, rz;
+
+ rx << 1, 0, 0, 0, cos(angles(0)), -1 * sin(angles(0)), 0, sin(angles(0)), cos(angles(0));
+ ry << cos(angles(1)), 0, sin(angles(1)), 0, 1, 0, -1 * sin(angles(1)), 0, cos(angles(1));
+ rz << cos(angles(2)), -1 * sin(angles(2)), 0, sin(angles(2)), cos(angles(2)), 0, 0, 0, 1;
+
+ r = rz * ry * rx;
+ return r;
+ }
+
+ /** Function template to get translation from SE(3) transformation. */
+	template <typename Scalar>
+	Eigen::Matrix<Scalar, 3, 1> getTranslations(const Eigen::Matrix<Scalar, 4, 4> &sensor_pose)
+ {
+ return sensor_pose.block(0,3,3,1);
+ }
+
+	template <typename T>
+ void transformPoint(const Eigen::Affine3f &transform, T &point)
+ {
+ Eigen::Vector3f tpoint(3);
+ tpoint(0) = point.x;
+ tpoint(1) = point.y;
+ tpoint(2) = point.z;
+
+ tpoint = transform * tpoint;
+ point.x = tpoint(0);
+ point.y = tpoint(1);
+ point.z = tpoint(2);
+ }
+
+ /**
+ * \brief Function template to back project 2D point to its corresponding 3D point in space
+ * \param point the 2D pixel coordinates
+ * \param range the depth image
+ * \param params the intrinsic parameters of the camera
+ * \param point3D the calculated 3D point in space
+ */
+	template <typename T, typename S>
+ void backprojectTo3D(T &point, Eigen::MatrixXf &range, const mrpt::img::TCamera ¶ms, S &point3D)
+ {
+ point3D[2] = range(point[1], point[0]);
+ point3D[0] = ((point[0] - params.cx())/params.fx()) * point3D[2];
+ point3D[1] = ((point[1] - params.cy())/params.fy()) * point3D[2];
+ }
+}
diff --git a/core/calib_solvers/CCalibFromLines.cpp b/core/calib_solvers/CCalibFromLines.cpp
new file mode 100755
index 0000000..c7baafe
--- /dev/null
+++ b/core/calib_solvers/CCalibFromLines.cpp
@@ -0,0 +1,296 @@
+#include "CCalibFromLines.h"
+#include <mrpt/math/geometry.h>
+
+CCalibFromLines::CCalibFromLines(CObservationTree *model, TCalibFromLinesParams *params) :
+ CExtrinsicCalib(model)
+{
+ this->params = params;
+}
+
+CCalibFromLines::~CCalibFromLines(){}
+
+void CCalibFromLines::segmentLines(const cv::Mat &image, Eigen::MatrixXf &range, const mrpt::img::TCamera &camera_params, const Eigen::Affine3f &intensity_to_depth_transform, std::vector<CLine> &lines)
+{
+ cv::Mat canny_image;
+
+ cv::Canny(image, canny_image, params->seg.clow_threshold, params->seg.clow_threshold * params->seg.chigh_to_low_ratio, params->seg.ckernel_size);
+
+	std::vector<cv::Vec2f> hlines;
+
+ cv::HoughLines(canny_image, hlines, 1, CV_PI, params->seg.hthreshold);
+
+
+ double rho, theta;
+ double cos_theta, sin_theta;
+ double m, c, c_max;
+ CLine line;
+
+ int x, y, xf, yf;
+
+ for(size_t n(0); n < hlines.size(); n++)
+ {
+ if(hlines[n][0] < 0)
+ {
+ rho = -hlines[n][0];
+ theta = hlines[n][1] - CV_PI;
+ }
+
+ else
+ {
+ rho = hlines[n][0];
+ theta = hlines[n][1];
+ }
+
+ if (rho == 0 && theta == 0)
+ continue;
+
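+		// Clip the infinite line given by (rho, theta) to the image rectangle: (x, y) and (xf, yf)
+		// below are the two border points between which the pixel walk is performed.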
+ if (fabs(theta) < 0.00001)
+ {
+			x = xf = static_cast<int>(rho + 0.5);
+ y = 0;
+ yf = canny_image.rows - 1;
+ }
+
+ else
+ {
+ cos_theta = cos(theta);
+ sin_theta = sin(theta);
+ m = -cos_theta / sin_theta;
+ c = rho * (sin_theta - m * cos_theta);
+
+ if (c >= 0)
+ {
+ if (c < canny_image.rows)
+ {
+ x = 0;
+					y = static_cast<int>(c);
+ }
+ else
+ {
+ y = canny_image.rows - 1;
+					x = static_cast<int>((y - c) / m);
+ }
+ }
+
+ else
+ {
+				x = static_cast<int>(-c / m);
+ y = 0;
+ }
+
+ c_max = m * (canny_image.cols - 1) + c;
+ if (c_max >= 0)
+ {
+ if (c_max < canny_image.rows)
+ {
+ xf = canny_image.cols - 1;
+					yf = static_cast<int>(c_max);
+ }
+
+ else
+ {
+ yf = canny_image.rows - 1;
+					xf = static_cast<int>((yf - c) / m);
+ }
+ }
+ else
+ {
+				xf = static_cast<int>(-c / m);
+ yf = 0;
+ }
+ }
+
+ line.rho = rho;
+ line.theta = theta;
+ line.m = m;
+ line.c = c;
+
+ // Bresenham algorithm
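+		// Walk the clipped line pixel by pixel; every contiguous run of Canny edge pixels that is at
+		// least hthreshold pixels long is kept as one finite line segment.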
+
+ bool onSegment = false;
+ int memory;
+ int memoryX = 0, memoryY = 0;
+ int xPrev = 0, yPrev = 0;
+ size_t nbPixels = 0;
+
+ int w = xf - x;
+ int h = yf - y;
+ int dx1, dy1, dx2, dy2 = 0;
+
+ int longest, shortest;
+ int numerator;
+
+ if (w < 0)
+ {
+ longest = -w;
+ dx1 = -1;
+ dx2 = -1;
+ }
+ else
+ {
+ longest = w;
+ dx1 = 1;
+ dx2 = 1;
+ }
+
+ if (h < 0)
+ {
+ shortest = -h;
+ dy1 = -1;
+ }
+
+ else
+ {
+ shortest = h;
+ dy1 = 1;
+ }
+
+ if (longest <= shortest)
+ {
+ memory = longest;
+ longest = shortest;
+ shortest = memory;
+ dx2 = 0;
+ if (h < 0)
+ {
+ dy2 = -1;
+ }
+
+ else
+ {
+ dy2 = 1;
+ }
+ }
+
+ numerator = longest / 2;
+
+ for (int i(0); i <= longest; ++i)
+ {
+ if (onSegment)
+ {
+				if (canny_image.at<uchar>(y, x) == 0 || i == longest)
+ {
+ onSegment = false;
+ if (nbPixels >= params->seg.hthreshold)
+ {
+						std::array<Eigen::Vector2i, 2> end_points;
+ end_points[0] = Eigen::Vector2i(memoryX, memoryY);
+ end_points[1] = Eigen::Vector2i(xPrev, yPrev);
+
+ Eigen::Vector2i mean_point = Eigen::Vector2i((memoryX + xPrev)/2, (memoryY + yPrev)/2);
+
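+						// Build the segment's 3D support: the viewing ray through the mean pixel, the normal of
+						// the projective plane spanned by the 2D direction and that ray, and the back-projected
+						// endpoints giving the 3D point p and direction v (all mapped to the depth sensor frame).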
+ Eigen::Vector3f ray;
+ ray[0] = (mean_point[0] - camera_params.cx())/camera_params.fx();
+ ray[1] = (mean_point[1] - camera_params.cy())/camera_params.fy();
+ ray[2] = 1;
+
+ Eigen::Vector2i l_temp = (end_points[0] - end_points[1]);
+ Eigen::Vector3i l(l_temp[0], l_temp[1], 0);
+
+ Eigen::Vector3f normal;
+ mrpt::math::crossProduct3D(l, ray, normal);
+
+ Eigen::Vector3f p;
+ utils::backprojectTo3D(mean_point, range, camera_params, p);
+
+						std::array<Eigen::Vector3f, 2> end_points3D;
+ utils::backprojectTo3D(end_points[0], range, camera_params, end_points3D[0]);
+ utils::backprojectTo3D(end_points[1], range, camera_params, end_points3D[1]);
+
+ Eigen::Vector3f v;
+ v = end_points3D[1] - end_points3D[0];
+
+ line.end_points = end_points;
+ line.end_points3D[0] = intensity_to_depth_transform * end_points3D[0];
+ line.end_points3D[1] = intensity_to_depth_transform * end_points3D[1];
+ line.mean_point = mean_point;
+ line.ray = intensity_to_depth_transform * ray;
+ line.l = l;
+ line.normal = intensity_to_depth_transform * normal;
+ line.p = intensity_to_depth_transform * p;
+ line.v = intensity_to_depth_transform * v;
+ lines.push_back(line);
+ }
+ }
+
+ else
+ {
+ ++nbPixels;
+ }
+ }
+
+			else if (canny_image.at<uchar>(y, x) != 0)
+ {
+ onSegment = true;
+ nbPixels = 0;
+ memoryX = x;
+ memoryY = y;
+ }
+
+ xPrev = x;
+ yPrev = y;
+
+ numerator += shortest;
+
+ if(numerator >= longest)
+ {
+ numerator -= longest;
+ x += dx1;
+ y += dy1;
+ }
+
+ else
+ {
+ x += dx2;
+ y += dy2;
+ }
+ }
+ }
+}
+
+void CCalibFromLines::findPotentialMatches(const std::vector<std::vector<CLine>> &lines, const int &set_id)
+{
+	std::vector<Eigen::Vector2f> sensor_pose_uncertainties = sync_model->getSensorUncertainties();
+
+ for(int i = 0; i < lines.size()-1; ++i)
+ for(int j = i+1; j < lines.size(); ++j)
+ {
+ for(int ii = 0; ii < lines[i].size(); ++ii)
+ {
+ for(int jj = 0; jj < lines[j].size(); ++jj)
+ {
+ Eigen::Vector3f n_ii = sync_model->getSensorPoses()[i].block(0,0,3,3) * lines[i][ii].normal;
+ Eigen::Vector3f v_ii = sync_model->getSensorPoses()[i].block(0,0,3,3) * lines[i][ii].v;
+
+ Eigen::Vector3f n_jj = sync_model->getSensorPoses()[j].block(0,0,3,3) * lines[j][jj].normal;
+ Eigen::Vector3f v_jj = sync_model->getSensorPoses()[j].block(0,0,3,3) * lines[j][jj].v;
+
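+				// Candidate match: the projective-plane normals (rotated into the common frame) must agree
+				// within the sensor's angular uncertainty, and the first normal must stay close to
+				// perpendicular to the second line's 3D direction.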
+ if((n_ii.dot(n_jj) > cos(sensor_pose_uncertainties[j](0) * (M_PI/180))) && (n_ii.dot(v_jj) < cos((90 - sensor_pose_uncertainties[j](0)) * (M_PI/180))))
+ {
+					std::array<int, 3> potential_match{set_id, ii, jj};
+ mmv_line_corresp[i][j].push_back(potential_match);
+ }
+ }
+ }
+
+// for stats printing when no matches exist
+// if((lines[i].size() == 0) || (lines[j].size() == 0))
+// {
+// std::array temp_match{-1, -1, -1};
+// mmv_line_corresp[i][j].push_back(temp_match);
+// }
+ }
+}
+
+Scalar CCalibFromLines::computeRotationResidual()
+{
+	// TODO: not implemented yet (see the README note about the solvers).
+	return Scalar(0);
+}
+
+Scalar CCalibFromLines::computeRotation()
+{
+	// TODO: not implemented yet (see the README note about the solvers).
+	return Scalar(0);
+}
+
+Scalar CCalibFromLines::computeTranslation()
+{
+	// TODO: not implemented yet (see the README note about the solvers).
+	return Scalar(0);
+}
+
diff --git a/core/calib_solvers/CCalibFromLines.h b/core/calib_solvers/CCalibFromLines.h
new file mode 100755
index 0000000..1f8e1dc
--- /dev/null
+++ b/core/calib_solvers/CCalibFromLines.h
@@ -0,0 +1,66 @@
+#pragma once
+
+#include "CExtrinsicCalib.h"
+#include "TCalibFromLinesParams.h"
+#include "CLine.h"
+
+#include <opencv2/imgproc/imgproc.hpp>
+#include <Eigen/Geometry>
+#include <mrpt/img/TCamera.h>
+#include <array>
+#include <map>
+#include <vector>
+
+/**
+ * \brief Exploit 3D line observations from a set of sensors to perform extrinsic calibration.
+ */
+
+class CCalibFromLines : public CExtrinsicCalib
+{
+protected:
+
+ /** The parameters for the calibration. */
+ TCalibFromLinesParams *params;
+
+ /** The segmented lines, the vector indices to access them are [sensor_id][obs_id][line_id]
+ * obs_id is with respect to the synchronized model.
+ */
+	std::map<int, std::vector<std::vector<CLine>>> mvv_lines;
+
+ /** The line correspondences between the different sensors.
+	 * The map indices correspond to the sensor ids, with the list of correspondences
+ * stored as a matrix with each row of the form - set_id, line_id1, line_id2.
+ */
+	std::map<int, std::map<int, std::vector<std::array<int, 3>>>> mmv_line_corresp;
+
+ CCalibFromLines(CObservationTree *model, TCalibFromLinesParams *params);
+ ~CCalibFromLines();
+
+ /**
+ * \brief Runs Canny-Hough, Bresenham algorithm, and subsequent back-projection to extract 2D and 3D lines from RGB-D images.
+ * The 3D lines extracted are in the depth sensor's frame.
+ * \param image the input image
+ * \param lines vector of lines segmented
+ */
+	void segmentLines(const cv::Mat &image, Eigen::MatrixXf &range, const mrpt::img::TCamera &camera_params, const Eigen::Affine3f &intensity_to_depth_transform, std::vector<CLine> &lines);
+
+ /**
+	 * Search for potential line matches between each sensor pair in a synchronized observation set.
+ * \param lines lines extracted from sensor observations that belong to the same synchronized set. lines[sensor_id][line_id] gives a line.
+ * \param set_id the id of the synchronized set the lines belong to.
+ * \param params the parameters for line matching.
+ */
+	void findPotentialMatches(const std::vector<std::vector<CLine>> &lines, const int &set_id);
+
+ /** Calculate the angular residual error of the correspondences.
+ * \return the residual
+ */
+ virtual Scalar computeRotationResidual();
+
+ /** Compute Calibration (only rotation).
+ * \return the residual
+ */
+ virtual Scalar computeRotation();
+
+ /** Compute Calibration (only translation).
+ \return the residual */
+ virtual Scalar computeTranslation();
+};
diff --git a/core/calib_solvers/CCalibFromPlanes.cpp b/core/calib_solvers/CCalibFromPlanes.cpp
new file mode 100755
index 0000000..ed7929c
--- /dev/null
+++ b/core/calib_solvers/CCalibFromPlanes.cpp
@@ -0,0 +1,440 @@
+
+/* +------------------------------------------------------------------------+
+ | Mobile Robot Programming Toolkit (MRPT) |
+ | http://www.mrpt.org/ |
+ | |
+ | Copyright (c) 2005-2018, Individual contributors, see AUTHORS file |
+ | See: http://www.mrpt.org/Authors - All rights reserved. |
+ | Released under BSD License. See details in http://www.mrpt.org/License |
+ +------------------------------------------------------------------------+ */
+
+#include "CCalibFromPlanes.h"
+#include <mrpt/pbmap/PbMap.h>
+
+#include <mrpt/poses/CPose3D.h>
+
+#include <Eigen/Dense>
+#include <pcl/point_types.h>
+#include <pcl/features/integral_image_normal.h>
+#include <pcl/segmentation/organized_multi_plane_segmentation.h>
+#include <pcl/segmentation/planar_region.h>
+#include <pcl/filters/extract_indices.h>
+
+using namespace std;
+
+CCalibFromPlanes::CCalibFromPlanes(CObservationTree *model, TCalibFromPlanesParams *params) :
+ CExtrinsicCalib(model)
+{
+ for(int sensor_id1=0; sensor_id1 < sync_model->getNumberOfSensors(); sensor_id1++)
+ {
+		mvv_planes[sensor_id1] = std::vector<std::vector<CPlaneCHull>>();
+		mmv_plane_corresp[sensor_id1] = std::map<int, std::vector<std::array<int, 3>>>();
+		for(int sensor_id2 = sensor_id1 + 1; sensor_id2 < sync_model->getNumberOfSensors(); sensor_id2++)
+			mmv_plane_corresp[sensor_id1][sensor_id2] = std::vector<std::array<int, 3>>();
+ }
+
+ this->params = params;
+}
+
+void CCalibFromPlanes::segmentPlanes(const pcl::PointCloud<pcl::PointXYZRGBA>::Ptr &cloud, std::vector<CPlaneCHull> &planes)
+{
+ unsigned min_inliers = params->seg.min_inliers_frac * cloud->size();
+
+	pcl::IntegralImageNormalEstimation<pcl::PointXYZRGBA, pcl::Normal> normal_estimation;
+
+ if(params->seg.normal_estimation_method == 0)
+ normal_estimation.setNormalEstimationMethod(normal_estimation.COVARIANCE_MATRIX);
+ else if(params->seg.normal_estimation_method == 1)
+ normal_estimation.setNormalEstimationMethod(normal_estimation.AVERAGE_3D_GRADIENT);
+ else
+ normal_estimation.setNormalEstimationMethod(normal_estimation.AVERAGE_DEPTH_CHANGE);
+
+ normal_estimation.setDepthDependentSmoothing(params->seg.depth_dependent_smoothing);
+ normal_estimation.setMaxDepthChangeFactor(params->seg.max_depth_change_factor);
+ normal_estimation.setNormalSmoothingSize(params->seg.normal_smoothing_size);
+
+	pcl::PointCloud<pcl::Normal>::Ptr normal_cloud(new pcl::PointCloud<pcl::Normal>);
+ normal_estimation.setInputCloud(cloud);
+ normal_estimation.compute(*normal_cloud);
+
+	pcl::OrganizedMultiPlaneSegmentation<pcl::PointXYZRGBA, pcl::Normal, pcl::Label> multi_plane_segmentation;
+ multi_plane_segmentation.setMinInliers(min_inliers);
+ multi_plane_segmentation.setAngularThreshold(params->seg.angle_threshold);
+ multi_plane_segmentation.setDistanceThreshold(params->seg.dist_threshold);
+ multi_plane_segmentation.setInputNormals(normal_cloud);
+ multi_plane_segmentation.setInputCloud(cloud);
+
+	std::vector<pcl::PlanarRegion<pcl::PointXYZRGBA>, Eigen::aligned_allocator<pcl::PlanarRegion<pcl::PointXYZRGBA>>> regions;
+	std::vector<pcl::PlanarRegion<pcl::PointXYZRGBA>, Eigen::aligned_allocator<pcl::PlanarRegion<pcl::PointXYZRGBA>>> unique_regions;
+	std::vector<pcl::ModelCoefficients> model_coefficients;
+	std::vector<pcl::PointIndices> inlier_indices;
+	pcl::PointCloud<pcl::Label>::Ptr labels(new pcl::PointCloud<pcl::Label>);
+	std::vector<pcl::PointIndices> label_indices;
+	std::vector<pcl::PointIndices> boundary_indices;
+
+ multi_plane_segmentation.segmentAndRefine(regions, model_coefficients, inlier_indices, labels, label_indices, boundary_indices);
+
+ // Create a vector with the planes detected in this frame, and calculate their parameters (normal, center, pointclouds, etc.)
+
+ mrpt::pbmap::PbMap pbmap;
+
+ for (size_t i = 0; i < regions.size(); i++)
+ {
+ CPlaneCHull planehull;
+
+ if(regions[i].getCurvature() > params->seg.max_curvature)
+ continue;
+
+ mrpt::pbmap::Plane plane;
+ plane.v3center = regions[i].getCentroid ();
+ plane.v3normal = Eigen::Vector3f(model_coefficients[i].values[0], model_coefficients[i].values[1], model_coefficients[i].values[2]);
+ plane.d = model_coefficients[i].values[3];
+
+ // Force the normal vector to point towards the camera
+ if( model_coefficients[i].values[3] < 0)
+ {
+ plane.v3normal = -plane.v3normal;
+ plane.d = -plane.d;
+ }
+
+ plane.curvature = regions[i].getCurvature();
+
+ // Extract the planar inliers from the input cloud
+		pcl::ExtractIndices<pcl::PointXYZRGBA> extract;
+		extract.setInputCloud ( cloud );
+		extract.setIndices ( boost::make_shared<const pcl::PointIndices> (inlier_indices[i]) );
+ extract.setNegative (false);
+ extract.filter (*plane.planePointCloudPtr);
+ plane.inliers = inlier_indices[i].indices;
+
+		pcl::PointCloud<pcl::PointXYZRGBA>::Ptr contourPtr(new pcl::PointCloud<pcl::PointXYZRGBA>);
+ contourPtr->points = regions[i].getContour();
+ plane.calcConvexHull(contourPtr);
+
+		// Check whether this region corresponds to the same plane as a previous one (this situation may happen when there exists a small discontinuity in the observation)
+
+ bool isSamePlane = false;
+ for (size_t j = 0; j < pbmap.vPlanes.size(); j++)
+ if(pbmap.vPlanes[j].isSamePlane(plane, params->seg.max_cos_normal, params->seg.dist_centre_plane_threshold, params->seg.proximity_threshold))
+ {
+ isSamePlane = true;
+ pbmap.vPlanes[j].mergePlane(plane);
+ boundary_indices.erase(boundary_indices.begin() + j);
+ break;
+ }
+
+ if(!isSamePlane)
+ {
+ pbmap.vPlanes.push_back(plane);
+ unique_regions.push_back(regions[i]);
+ }
+ }
+
+	pcl::PointCloud<pcl::PointXYZRGBA>::Ptr contourPtr(new pcl::PointCloud<pcl::PointXYZRGBA>);
+ planes.resize(pbmap.vPlanes.size());
+ for (size_t i = 0; i < pbmap.vPlanes.size (); i++)
+ {
+ planes[i].v3normal = pbmap.vPlanes[i].v3normal;
+ planes[i].v3center = pbmap.vPlanes[i].v3center;
+ planes[i].d = pbmap.vPlanes[i].d;
+
+ planes[i].ConvexHullPtr = pbmap.vPlanes[i].polygonContourPtr;
+ planes[i].v_inliers = pbmap.vPlanes[i].inliers;
+
+
+ contourPtr->points = unique_regions[i].getContour();
+ pbmap.vPlanes[i].calcConvexHull(contourPtr, planes[i].v_hull_indices);
+ for (size_t j = 0; j < planes[i].v_hull_indices.size(); j++)
+ planes[i].v_hull_indices[j] = boundary_indices[i].indices[planes[i].v_hull_indices[j]];
+ }
+}
+
+void CCalibFromPlanes::findPotentialMatches(const std::vector<std::vector<CPlaneCHull>> &planes, const int &set_id)
+{
+	std::vector<Eigen::Vector2f> sensor_pose_uncertainties = sync_model->getSensorUncertainties();
+
+ for(int i = 0; i < planes.size()-1; ++i)
+ for(int j = i+1; j < planes.size(); ++j)
+ {
+ for(int ii = 0; ii < planes[i].size(); ++ii)
+ {
+ for(int jj = 0; jj < planes[j].size(); ++jj)
+ {
+ Eigen::Vector3f n_ii = sync_model->getSensorPoses()[i].block(0,0,3,3) * planes[i][ii].v3normal;
+ Scalar d1 = planes[i][ii].d - Eigen::Vector3f(sync_model->getSensorPoses()[i].block(0,3,3,1)).dot(n_ii);
+ Eigen::Vector3f n_jj = sync_model->getSensorPoses()[j].block(0,0,3,3) * planes[j][jj].v3normal;
+ Scalar d2 = planes[j][jj].d - Eigen::Vector3f(sync_model->getSensorPoses()[j].block(0,3,3,1)).dot(n_jj);
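+					// Candidate match: the plane normals (in the common reference frame) must agree within the
+					// angular uncertainty, and the plane-to-origin distances within the distance uncertainty.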
+ if((n_ii.dot(n_jj) > cos(sensor_pose_uncertainties[j][0] * (M_PI/180))) && (d1 - d2 < sensor_pose_uncertainties[j][1]))
+ {
+					std::array<int, 3> potential_match{set_id, ii, jj};
+ mmv_plane_corresp[i][j].push_back(potential_match);
+ }
+ }
+ }
+
+// // for stats printing when no matches exist
+// if((planes[i].size() == 0) || (planes[j].size() == 0))
+// {
+// std::array temp_match{-1, -1, -1};
+// mmv_plane_corresp[i][j].push_back(temp_match);
+// }
+ }
+}
+
+Scalar CCalibFromPlanes::computeRotationResidual()
+{
+	std::vector<Eigen::Matrix4f> sensor_poses = sync_model->getSensorPoses();
+ Scalar sum_squared_error = 0.; // Accumulated squared error for all plane correspondences
+
+	for(std::map<int, std::map<int, std::vector<std::array<int, 3>>>>::iterator it_sensor_i = mmv_plane_corresp.begin();
+ it_sensor_i != mmv_plane_corresp.end(); it_sensor_i++)
+ {
+ size_t sensor_i = it_sensor_i->first;
+		for(std::map<int, std::vector<std::array<int, 3>>>::iterator it_sensor_j = it_sensor_i->second.begin();
+ it_sensor_j != it_sensor_i->second.end(); it_sensor_j++)
+ {
+ size_t sensor_j = it_sensor_j->first;
+			std::vector<std::array<int, 3>> &correspondences = it_sensor_j->second;
+ for(int i = 0; i < correspondences.size(); i++)
+ {
+ int set_id = correspondences[i][0];
+ int sync_obs1_id = sync_model->findSyncIndexFromSet(set_id, sync_model->getSensorLabels()[sensor_i]);
+ int sync_obs2_id = sync_model->findSyncIndexFromSet(set_id, sync_model->getSensorLabels()[sensor_j]);
+
+ Eigen::Vector3f n_obs_i = mvv_planes[sensor_i][sync_obs1_id][correspondences[i][1]].v3normal;
+ Eigen::Vector3f n_obs_j = mvv_planes[sensor_j][sync_obs2_id][correspondences[i][2]].v3normal;
+
+ Eigen::Vector3f n_i = sensor_poses[sensor_i].block(0,0,3,3) * n_obs_i;
+ Eigen::Vector3f n_j = sensor_poses[sensor_j].block(0,0,3,3) * n_obs_j;
+
+ Eigen::Vector3f rot_error = (n_i - n_j);
+ sum_squared_error += rot_error.dot(rot_error);
+ }
+ }
+ }
+
+ return sum_squared_error;
+}
+
+Scalar CCalibFromPlanes::computeRotation()
+{
+	std::vector<Eigen::Matrix4f> sensor_poses = sync_model->getSensorPoses();
+ const int num_sensors = sensor_poses.size();
+ const int dof = 3 * (num_sensors - 1);
+ Eigen::VectorXf update_vector(dof);
+ Eigen::Matrix3f jacobian_rot_i, jacobian_rot_j; // Jacobians of the rotation
+ float error, new_error, init_error;
+ float av_angle_error;
+
+	std::vector<Eigen::Matrix4f> estimated_poses = sensor_poses;
+	std::vector<Eigen::Matrix4f> estimated_poses_temp(sensor_poses.size());
+
+ float increment = 1000, diff_error = 1000;
+ int it = 0;
+
+ init_error = computeRotationResidual();
+
+ while(it < params->solver.max_iters && increment > params->solver.min_update && diff_error > params->solver.converge_error)
+ {
+ // Calculate the hessian and the gradient
+		hessian = Eigen::Matrix<Scalar, Eigen::Dynamic, Eigen::Dynamic>::Zero(dof, dof); // Hessian of the rotation of the decoupled system
+		gradient = Eigen::Matrix<Scalar, Eigen::Dynamic, 1>::Zero(dof); // Gradient of the rotation of the decoupled system
+ error = 0.0;
+ av_angle_error = 0.0;
+
+ for(auto it_sensor_i = mmv_plane_corresp.begin(); it_sensor_i != mmv_plane_corresp.end(); it_sensor_i++)
+ {
+ size_t sensor_i = it_sensor_i->first;
+ for(auto it_sensor_j = it_sensor_i->second.begin(); it_sensor_j != it_sensor_i->second.end(); it_sensor_j++)
+ {
+ size_t sensor_j = it_sensor_j->first;
+ int pos_sensor_i = 3 * (sensor_i - 1);
+ int pos_sensor_j = 3 * (sensor_j - 1);
+
+ auto &correspondences = it_sensor_j->second;
+ for(int i = 0; i < correspondences.size(); i++)
+ {
+ size_t set_id = correspondences[i][0];
+
+ int sync_obs1_id = sync_model->findSyncIndexFromSet(set_id, sync_model->getSensorLabels()[sensor_i]);
+ int sync_obs2_id = sync_model->findSyncIndexFromSet(set_id, sync_model->getSensorLabels()[sensor_j]);
+
+ Eigen::Vector3f n_obs_i = mvv_planes[sensor_i][sync_obs1_id][correspondences[i][1]].v3normal;
+ Eigen::Vector3f n_obs_j = mvv_planes[sensor_j][sync_obs2_id][correspondences[i][2]].v3normal;
+
+ Eigen::Vector3f n_i = sensor_poses[sensor_i].block(0,0,3,3) * n_obs_i;
+ Eigen::Vector3f n_j = sensor_poses[sensor_j].block(0,0,3,3) * n_obs_j;
+
+ jacobian_rot_i = -skew(n_i);
+ jacobian_rot_j = skew(n_j);
+
+ Eigen::Vector3f rot_error = (n_i - n_j);
+ error += rot_error.dot(rot_error);
+ av_angle_error += acos(n_i.dot(n_j));
+
+ if(sensor_i != 0) // The pose of the first camera is fixed
+ {
+ hessian.block(pos_sensor_i, pos_sensor_i, 3, 3) += jacobian_rot_i.transpose() * jacobian_rot_i;
+ gradient.block(pos_sensor_i,0,3,1) += jacobian_rot_i.transpose() * rot_error;
+
+ // Cross term
+ hessian.block(pos_sensor_i, pos_sensor_j, 3, 3) += jacobian_rot_i.transpose() * jacobian_rot_j;
+ }
+
+ hessian.block(pos_sensor_j, pos_sensor_j, 3, 3) += jacobian_rot_j.transpose() * jacobian_rot_j;
+ gradient.block(pos_sensor_j,0,3,1) += jacobian_rot_j.transpose() * rot_error;
+ }
+
+ if(sensor_i != 0) // Fill the lower left triangle with the corresponding cross terms
+ hessian.block(pos_sensor_j, pos_sensor_i, 3, 3) = hessian.block(pos_sensor_i, pos_sensor_j, 3, 3).transpose();
+ }
+ }
+
+ //cout << "hessian \n" << hessian << endl;
+ //cout << "gradient \n" << gradient.transpose() << endl;
+ //cout << "Error accumulated " << accum_error2 << endl;
+
+ Eigen::FullPivLU<Eigen::MatrixXf> lu(hessian);
+ if(!lu.isInvertible())
+ {
+ result.msg = "System is badly conditioned. Please try again with a new set of observations.";
+ break;
+ }
+
+ // Update rotation
+ update_vector = -lu.solve(gradient); // Equivalent to -hessian.inverse() * gradient, but reuses the LU factorization
+ //cout << "update_vector " << update_vector.transpose() << endl;
+
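+ // Map each sensor's 3-vector increment to a rotation matrix via the exponential map
+ // and left-multiply it onto the current rotation estimate (sensor 0 stays fixed).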
+ for(int sensor_id = 1; sensor_id < num_sensors; sensor_id++)
+ {
+ mrpt::poses::CPose3D pose;
+ mrpt::math::CArrayNumeric<double, 3> rot_manifold;
+ rot_manifold[0] = update_vector(3*sensor_id-3,0);
+ rot_manifold[1] = update_vector(3*sensor_id-2,0);
+ rot_manifold[2] = update_vector(3*sensor_id-1,0);
+ mrpt::math::CMatrixDouble33 update_rot = pose.exp_rotation(rot_manifold);
+ //cout << "update_rot\n" << update_rot << endl;
+ Eigen::Matrix3f update_rot_eig;
+ update_rot_eig << update_rot(0,0), update_rot(0,1), update_rot(0,2),
+ update_rot(1,0), update_rot(1,1), update_rot(1,2),
+ update_rot(2,0), update_rot(2,1), update_rot(2,2);
+ estimated_poses_temp[sensor_id] = estimated_poses[sensor_id];
+ estimated_poses_temp[sensor_id].block(0,0,3,3) = update_rot_eig * estimated_poses[sensor_id].block(0,0,3,3);
+ //cout << "old rotation" << sensor_id << "\n" << Rt_estimated[sensor_id].block(0,0,3,3) << endl;
+ //cout << "new rotation\n" << Rt_estimatedTemp[sensor_id].block(0,0,3,3) << endl;
+ }
+
+ //accum_error = computeRotationResidual(estimated_poses);
+ //cout << "Error accumulated " << accum_error2 << endl;
+ new_error = computeRotationResidual();
+
+ //cout << "New rotation error " << new_accum_error2 << endl;
+ //cout << "Closing loop? \n" << Rt_estimated[0].inverse() * Rt_estimated[7] * Rt_78;
+
+ //Assign new rotations
+ if(new_error < error)
+ for(int sensor_id = 1; sensor_id < num_sensors; sensor_id++)
+ estimated_poses[sensor_id] = estimated_poses_temp[sensor_id];
+
+ increment = update_vector.dot(update_vector);
+ diff_error = error - new_error;
+ ++it;
+ //cout << "Iteration " << it << " increment " << increment << " diff_error " << diff_error << endl;
+ }
+
+ if(it == params->solver.max_iters)
+ result.msg = "Maximum iterations reached.";
+ else if(increment < params->solver.min_update)
+ result.msg = "Increment too small.";
+ else if(diff_error < params->solver.converge_error)
+ result.msg = "Convergence";
+
+ result.init_error = init_error;
+ result.num_iters = it;
+ result.final_error = new_error;
+ result.estimate = estimated_poses;
+
+ //std::cout << "ErrorCalibRotation " << accum_error2/numPlaneCorresp << " " << av_angle_error/numPlaneCorresp << std::endl;
+ return new_error;
+}
+
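+// Builds and solves the linear least-squares problem for the sensor translations.
+// For each plane correspondence the residual is r = (d_i - n_i . t_i) - (d_j - n_j . t_j), which is
+// linear in the translations, so a single solve of the normal equations gives the exact minimizer.
+// The computed update is left commented out below and is not yet applied to the sensor poses.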
+Scalar CCalibFromPlanes::computeTranslation()
+{
+ auto sensor_poses = sync_model->getSensorPoses();
+ const int num_sensors = sensor_poses.size();
+ const int dof = 3 * (num_sensors - 1);
+ Eigen::VectorXf update_vector(dof);
+ Eigen::Matrix3f jacobian_trans_i, jacobian_trans_j; // Products accumulated into the normal equations for the translation (the residual Jacobians are -n_i and n_j)
+ float accum_error2(0.f);
+
+ hessian.setZero(dof, dof); // Reset the normal equations accumulated during the rotation estimation
+ gradient.setZero(dof);
+
+ for(auto it_sensor_i = mmv_plane_corresp.begin(); it_sensor_i != mmv_plane_corresp.end(); it_sensor_i++)
+ {
+ size_t sensor_i = it_sensor_i->first;
+ for(auto it_sensor_j = it_sensor_i->second.begin(); it_sensor_j != it_sensor_i->second.end(); it_sensor_j++)
+ {
+ size_t sensor_j = it_sensor_j->first;
+ int pos_sensor_i = 3 * (sensor_i - 1);
+ int pos_sensor_j = 3 * (sensor_j - 1);
+
+ auto &correspondences = it_sensor_j->second;
+ for(int i = 0; i < correspondences.size(); i++)
+ {
+ size_t set_id = correspondences[i][0];
+
+ int sync_obs1_id = sync_model->findSyncIndexFromSet(set_id, sync_model->getSensorLabels()[sensor_i]);
+ int sync_obs2_id = sync_model->findSyncIndexFromSet(set_id, sync_model->getSensorLabels()[sensor_j]);
+
+ Eigen::Vector3f n_obs_i = mvv_planes[sensor_i][sync_obs1_id][correspondences[i][1]].v3normal;
+ Eigen::Vector3f n_obs_j = mvv_planes[sensor_j][sync_obs2_id][correspondences[i][2]].v3normal;
+
+ Eigen::Vector3f n_i = sensor_poses[sensor_i].block(0,0,3,3) * n_obs_i;
+ Eigen::Vector3f n_j = sensor_poses[sensor_j].block(0,0,3,3) * n_obs_j;
+
+ float trans_error = mvv_planes[sensor_i][sync_obs1_id][correspondences[i][1]].d - mvv_planes[sensor_j][sync_obs2_id][correspondences[i][2]].d; // Assumes all initial translations are zero
+ //trans_error = (mvv_planes[sensor_i][sync_obs1_id][correspondences[i][1]].d - Eigen::Vector3f(sensor_poses[sensor_i].block(0,3,3,1)).dot(n_i)) - (mvv_planes[sensor_j][sync_obs2_id][correspondences[i][2]].d - Eigen::Vector3f(sensor_poses[sensor_j].block(0,3,3,1)).dot(n_j));
+ accum_error2 += trans_error * trans_error;
+
+ jacobian_trans_i = n_i * n_i.transpose(); // Outer products; for unit normals these yield the Hessian blocks of J_i = -n_i^T, J_j = n_j^T
+ jacobian_trans_j = -n_i * n_j.transpose();
+
+ if(sensor_i != 0) // The pose of the first camera is fixed
+ {
+ hessian.block(pos_sensor_i, pos_sensor_i, 3, 3) += jacobian_trans_i.transpose() * jacobian_trans_i;
+ gradient.block(pos_sensor_i,0,3,1) += -n_i * trans_error; // J_i^T * r with J_i = -n_i^T
+
+ // Cross term
+ hessian.block(pos_sensor_i, pos_sensor_j, 3, 3) += jacobian_trans_i.transpose() * jacobian_trans_j;
+ }
+
+ hessian.block(pos_sensor_j, pos_sensor_j, 3, 3) += jacobian_trans_j.transpose() * jacobian_trans_j;
+ gradient.block(pos_sensor_j,0,3,1) += n_j * trans_error; // J_j^T * r with J_j = n_j^T
+ }
+
+ if(sensor_i != 0) // Fill the lower left triangle with the corresponding cross terms
+ hessian.block(pos_sensor_j, pos_sensor_i, 3, 3) = hessian.block(pos_sensor_i, pos_sensor_j, 3, 3).transpose();
+ }
+ }
+
+ //cout << "hessian \n" << hessian << endl;
+ //cout << "gradient \n" << gradient.transpose() << endl;
+ //cout << "Error accumulated " << accum_error2 << endl;
+
+ Eigen::FullPivLU<Eigen::MatrixXf> lu(hessian);
+ if(!lu.isInvertible())
+ {
+ result.msg = "System is badly conditioned. Please try again with a new set of observations.";
+ return accum_error2;
+ }
+
+ // Update the translation (Linear Least-Squares -> exact solution)
+ update_vector = -lu.solve(gradient); // Equivalent to -hessian.inverse() * gradient, but reuses the LU factorization
+ //cout << "update_vector " << update_vector.transpose() << endl;
+
+// for(int sensor_id = 1; sensor_id < num_sensors; sensor_id++)
+// sensor_poses[sensor_id].block(0,3,3,1) = update_vector.block(3*(sensor_id-1),0,3,1);
+
+ return accum_error2;
+}
diff --git a/core/calib_solvers/CCalibFromPlanes.h b/core/calib_solvers/CCalibFromPlanes.h
new file mode 100755
index 0000000..bb193a3
--- /dev/null
+++ b/core/calib_solvers/CCalibFromPlanes.h
@@ -0,0 +1,97 @@
+/* +------------------------------------------------------------------------+
+ | Mobile Robot Programming Toolkit (MRPT) |
+ | http://www.mrpt.org/ |
+ | |
+ | Copyright (c) 2005-2018, Individual contributors, see AUTHORS file |
+ | See: http://www.mrpt.org/Authors - All rights reserved. |
+ | Released under BSD License. See details in http://www.mrpt.org/License |
+ +------------------------------------------------------------------------+ */
+#pragma once
+
+#include "CExtrinsicCalib.h"
+#include "TCalibFromPlanesParams.h"
+#include "TSolverResult.h"
+#include
+//#include
+//#include
+#include