@comment{Aguiar2021ESWA.bib — web-scrape chrome and line-number gutter removed; original file content follows.}
@article{PINTODEAGUIAR2021114894,
  title    = {A Camera to {LiDAR} calibration approach through the Optimization of Atomic Transformations},
  journal  = {Expert Systems with Applications},
  pages    = {114894},
  year     = {2021},
  issn     = {0957-4174},
  doi      = {10.1016/j.eswa.2021.114894},
  url      = {https://www.sciencedirect.com/science/article/pii/S0957417421003353},
  author   = {André Silva {Pinto de Aguiar} and Miguel Armando {Riem de Oliveira} and Eurico Farinha Pedrosa and Filipe Baptista {Neves dos Santos}},
  keywords = {Computer Vision},
  abstract = {This paper proposes a camera-to-3D Light Detection And Ranging calibration framework through the optimization of atomic transformations. The system is able to simultaneously calibrate multiple cameras with Light Detection And Ranging sensors, solving the problem of Bundle. In comparison with the state-of-the-art, this work presents several novelties: the ability to simultaneously calibrate multiple cameras and LiDARs; the support for multiple sensor modalities; the calibration through the optimization of atomic transformations, without changing the topology of the input transformation tree; and the integration of the calibration framework within the Robot Operating System (ROS) framework. The software pipeline allows the user to interactively position the sensors for providing an initial estimate, to label and collect data, and visualize the calibration procedure. To test this framework, an agricultural robot with a stereo camera and a 3D Light Detection And Ranging sensor was used. Pairwise calibrations and a single calibration of the three sensors were tested and evaluated. Results show that the proposed approach produces accurate calibrations when compared to the state-of-the-art, and is robust to harsh conditions such as inaccurate initial guesses or small amount of data used in calibration. Experiments have shown that our optimization process can handle an angular error of approximately 20 degrees and a translation error of 0.5 meters, for each sensor. Moreover, the proposed approach is able to achieve state-of-the-art results even when calibrating the entire system simultaneously.},
}