diff --git a/README.md b/README.md index 2bc4517..b0a90c9 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,19 @@ # multisense_ros -Wrappers, drivers, tools and additional API's for using MultiSense SL, S7, S7S, S21, M, ST21, and BCAM with ROS. +Wrappers, drivers, tools and additional API's for using MultiSense S27, S30, KS21, SL, S7, S7S, S21, M, ST21, BCAM with ROS. ### Installation, Documentation and Tutorials -[Installing multisense_ros](http://docs.carnegierobotics.com/SL/install.html#install) + +See the following for documentation on on installing and using multisense_ros for specific camera types. + +- [S27](https://docs.carnegierobotics.com/S27/index.html) +- [S30](https://docs.carnegierobotics.com/S30/index.html) +- [KS21](https://docs.carnegierobotics.com/KS21/index.html) +- [S21](https://docs.carnegierobotics.com/S21/index.html) +- [S7/S7S](https://docs.carnegierobotics.com/S7/index.html) +- [SL](https://docs.carnegierobotics.com/SL/index.html) + +Please see the following link for detailed documentation on the [LibMultiSense API](https://docs.carnegierobotics.com/libmultisense/index.html) wrapped by the ROS driver. ### Develop and Contribute diff --git a/multisense_bringup/CMakeLists.txt b/multisense_bringup/CMakeLists.txt index 152e58c..76ffe5f 100644 --- a/multisense_bringup/CMakeLists.txt +++ b/multisense_bringup/CMakeLists.txt @@ -18,6 +18,7 @@ include_directories(include ${catkin_INCLUDE_DIRS}) install(FILES rviz_config.rviz multisense.launch + remote_head.launch DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) diff --git a/multisense_bringup/multisense.launch b/multisense_bringup/multisense.launch index 8f6e607..5f24a9f 100644 --- a/multisense_bringup/multisense.launch +++ b/multisense_bringup/multisense.launch @@ -1,11 +1,13 @@ - + + @@ -26,6 +28,7 @@ + diff --git a/multisense_bringup/remote_head.launch b/multisense_bringup/remote_head.launch new file mode 100644 index 0000000..6513fb0 --- /dev/null +++ b/multisense_bringup/remote_head.launch @@ -0,0 +1,158 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/meshes/multisense_remote_head_monocam.STL b/multisense_description/meshes/multisense_remote_head_monocam.STL new file mode 100644 index 0000000..4030ac9 --- /dev/null +++ b/multisense_description/meshes/multisense_remote_head_monocam.STL @@ -0,0 +1,86 @@ +solid model +facet normal 0.0 0.0 -1.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 0.0 -20.0 0.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal 0.0 0.0 -1.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 20.0 0.0 0.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal -0.0 -1.0 -0.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 0.0 -20.0 0.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal -0.0 -1.0 -0.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 20.0 -20.0 20.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal 1.0 0.0 0.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 20.0 -20.0 20.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal 1.0 0.0 0.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 20.0 0.0 0.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +facet normal -0.0 -0.0 1.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 0.0 0.0 20.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal -0.0 -0.0 1.0 +outer loop +vertex 0.0 
0.0 20.0 +vertex 20.0 -20.0 20.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +facet normal -1.0 0.0 0.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 0.0 -20.0 0.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal -1.0 0.0 0.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 0.0 0.0 20.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal -0.0 1.0 0.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 20.0 0.0 0.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal -0.0 1.0 0.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 0.0 0.0 20.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +endsolid model diff --git a/multisense_description/meshes/multisense_remote_head_stereo.STL b/multisense_description/meshes/multisense_remote_head_stereo.STL new file mode 100644 index 0000000..4030ac9 --- /dev/null +++ b/multisense_description/meshes/multisense_remote_head_stereo.STL @@ -0,0 +1,86 @@ +solid model +facet normal 0.0 0.0 -1.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 0.0 -20.0 0.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal 0.0 0.0 -1.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 20.0 0.0 0.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal -0.0 -1.0 -0.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 0.0 -20.0 0.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal -0.0 -1.0 -0.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 20.0 -20.0 20.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal 1.0 0.0 0.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 20.0 -20.0 20.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal 1.0 0.0 0.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 20.0 0.0 0.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +facet normal -0.0 -0.0 1.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 0.0 0.0 20.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal -0.0 -0.0 1.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 20.0 -20.0 20.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +facet normal -1.0 0.0 0.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 0.0 -20.0 0.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal -1.0 0.0 0.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 0.0 0.0 20.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal -0.0 1.0 0.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 20.0 0.0 0.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal -0.0 1.0 0.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 0.0 0.0 20.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +endsolid model diff --git a/multisense_description/meshes/multisense_remote_head_vpb.STL b/multisense_description/meshes/multisense_remote_head_vpb.STL new file mode 100644 index 0000000..4030ac9 --- /dev/null +++ b/multisense_description/meshes/multisense_remote_head_vpb.STL @@ -0,0 +1,86 @@ +solid model +facet normal 0.0 0.0 -1.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 0.0 -20.0 0.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal 0.0 0.0 -1.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 20.0 0.0 0.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal -0.0 -1.0 -0.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 0.0 -20.0 0.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal -0.0 -1.0 -0.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 20.0 -20.0 20.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal 1.0 0.0 0.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 20.0 -20.0 20.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal 1.0 0.0 0.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 20.0 0.0 0.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +facet normal -0.0 -0.0 1.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 0.0 0.0 20.0 +vertex 0.0 -20.0 20.0 +endloop 
+endfacet +facet normal -0.0 -0.0 1.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 20.0 -20.0 20.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +facet normal -1.0 0.0 0.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 0.0 -20.0 0.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal -1.0 0.0 0.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 0.0 0.0 20.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal -0.0 1.0 0.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 20.0 0.0 0.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal -0.0 1.0 0.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 0.0 0.0 20.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +endsolid model diff --git a/multisense_description/urdf/multisenseremote_head_monocam/importable.urdf.xacro b/multisense_description/urdf/multisenseremote_head_monocam/importable.urdf.xacro new file mode 100644 index 0000000..258988d --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_monocam/importable.urdf.xacro @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_monocam/multisense_remote_head_monocam.urdf.xacro b/multisense_description/urdf/multisenseremote_head_monocam/multisense_remote_head_monocam.urdf.xacro new file mode 100644 index 0000000..c2b0ae5 --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_monocam/multisense_remote_head_monocam.urdf.xacro @@ -0,0 +1,68 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_monocam/standalone.urdf.xacro b/multisense_description/urdf/multisenseremote_head_monocam/standalone.urdf.xacro new file mode 100644 index 0000000..43e1e89 --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_monocam/standalone.urdf.xacro @@ -0,0 +1,6 @@ + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_stereo/importable.urdf.xacro b/multisense_description/urdf/multisenseremote_head_stereo/importable.urdf.xacro new file mode 100644 index 0000000..146bbae --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_stereo/importable.urdf.xacro @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_stereo/multisense_remote_head_stereo.urdf.xacro b/multisense_description/urdf/multisenseremote_head_stereo/multisense_remote_head_stereo.urdf.xacro new file mode 100644 index 0000000..c7aa2e3 --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_stereo/multisense_remote_head_stereo.urdf.xacro @@ -0,0 +1,68 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_stereo/standalone.urdf.xacro b/multisense_description/urdf/multisenseremote_head_stereo/standalone.urdf.xacro new file mode 100644 index 0000000..4545c10 --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_stereo/standalone.urdf.xacro @@ -0,0 +1,6 @@ + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_vpb/importable.urdf.xacro b/multisense_description/urdf/multisenseremote_head_vpb/importable.urdf.xacro new file mode 100644 index 0000000..a51aa2a --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_vpb/importable.urdf.xacro @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git 
a/multisense_description/urdf/multisenseremote_head_vpb/multisense_remote_head_vpb.urdf.xacro b/multisense_description/urdf/multisenseremote_head_vpb/multisense_remote_head_vpb.urdf.xacro new file mode 100644 index 0000000..22469f0 --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_vpb/multisense_remote_head_vpb.urdf.xacro @@ -0,0 +1,68 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_vpb/standalone.urdf.xacro b/multisense_description/urdf/multisenseremote_head_vpb/standalone.urdf.xacro new file mode 100644 index 0000000..e91ea81 --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_vpb/standalone.urdf.xacro @@ -0,0 +1,6 @@ + + + + + + diff --git a/multisense_lib/sensor_api b/multisense_lib/sensor_api index 23b987b..7265e59 160000 --- a/multisense_lib/sensor_api +++ b/multisense_lib/sensor_api @@ -1 +1 @@ -Subproject commit 23b987b1a6ecbdef7e82f7877e1c9b87f6604456 +Subproject commit 7265e599e11347fbdc94335bc791fcd20acc132d diff --git a/multisense_ros/CMakeLists.txt b/multisense_ros/CMakeLists.txt index 37075c3..f156687 100644 --- a/multisense_ros/CMakeLists.txt +++ b/multisense_ros/CMakeLists.txt @@ -50,7 +50,10 @@ add_message_files(DIRECTORY msg RawLidarCal.msg Histogram.msg DeviceStatus.msg - StampedPps.msg) + StampedPps.msg + AprilTagCornerPoint.msg + AprilTagDetection.msg + AprilTagDetectionArray.msg) generate_messages(DEPENDENCIES sensor_msgs) diff --git a/multisense_ros/cfg/multisense.cfg b/multisense_ros/cfg/multisense.cfg index f91fca4..1a74038 100755 --- a/multisense_ros/cfg/multisense.cfg +++ b/multisense_ros/cfg/multisense.cfg @@ -10,7 +10,7 @@ else: from dynamic_reconfigure.parameter_generator_catkin import * class SensorConfig(object): - def __init__(self, name=None, sgm=False, motor=False, imu=False, lighting=False, aux=False, ar0234=False, features=False): + def __init__(self, name=None, sgm=False, motor=False, imu=False, lighting=False, aux=False, ar0234=False, features=False, ptp=False): self.name = name self.sgm = sgm self.motor = motor @@ -19,31 +19,39 @@ class SensorConfig(object): self.aux = aux self.ar0234 = ar0234 self.features = features + self.ptp = ptp #enddef #endclass class SupportedFeatures(Enum): NONE = 0 GROUND_SURFACE = 1 + APRILTAG = 2 sensorConfigList = [] -sensorConfigList.append(SensorConfig(name="sl_bm_cmv2000", sgm=False, motor=True, imu=False, lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="sl_bm_cmv2000_imu", sgm=False, motor=True, imu=True , lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="sl_bm_cmv4000", sgm=False, motor=True, imu=False, lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="sl_bm_cmv4000_imu", sgm=False, motor=True, imu=True , lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="sl_sgm_cmv2000_imu", sgm=True, motor=True, imu=True , lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="sl_sgm_cmv4000_imu", sgm=True, motor=True, imu=True , lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="st21_sgm_vga_imu", sgm=True, motor=False, imu=True , lighting=False, aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="mono_cmv2000", 
sgm=False, motor=False, imu=True , lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="mono_cmv4000", sgm=False, motor=False, imu=True , lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="s27_sgm_AR0234", sgm=True, motor=False, imu=False, lighting=False, aux=True, ar0234=True , features=True)) -sensorConfigList.append(SensorConfig(name="ks21_sgm_AR0234", sgm=True, motor=False, imu=False, lighting=True , aux=False, ar0234=True , features=True)) +sensorConfigList.append(SensorConfig(name="sl_bm_cmv2000", sgm=False, motor=True, imu=False, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="sl_bm_cmv2000_imu", sgm=False, motor=True, imu=True, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="sl_bm_cmv4000", sgm=False, motor=True, imu=False, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="sl_bm_cmv4000_imu", sgm=False, motor=True, imu=True, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="sl_sgm_cmv2000_imu", sgm=True, motor=True, imu=True, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="sl_sgm_cmv4000_imu", sgm=True, motor=True, imu=True, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="st21_sgm_vga_imu", sgm=True, motor=False, imu=True, lighting=False, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="mono_cmv2000", sgm=False, motor=False, imu=True, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="mono_cmv4000", sgm=False, motor=False, imu=True, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="s27_sgm_AR0234", sgm=True, motor=False, imu=False, lighting=False, aux=True, ar0234=True, features=True, ptp=True)) +sensorConfigList.append(SensorConfig(name="ks21_sgm_AR0234", sgm=True, motor=False, imu=False, lighting=True, aux=False, ar0234=True, features=True, ptp=True)) +sensorConfigList.append(SensorConfig(name="remote_head_vpb", sgm=False, motor=False, imu=False, lighting=False, aux=False, ar0234=False, features=True, ptp=True)) +sensorConfigList.append(SensorConfig(name="remote_head_sgm_AR0234", sgm=True, motor=False, imu=False, lighting=True, aux=False, ar0234=True, features=True, ptp=True)) +sensorConfigList.append(SensorConfig(name="remote_head_monocam_AR0234",sgm=False, motor=False, imu=False, lighting=True, aux=False, ar0234=True, features=True, ptp=True)) for feature_name, feature in SupportedFeatures.__members__.items(): for cfg in sensorConfigList: # Don't add feature config for unsupported cameras if not cfg.features and feature != SupportedFeatures.NONE: continue + if feature == SupportedFeatures.GROUND_SURFACE and not cfg.sgm: + # ground surface requires stereo + continue gen = ParameterGenerator() @@ -130,12 +138,22 @@ for feature_name, feature in SupportedFeatures.__members__.items(): roi_width = 2048 roi_height = 2048 elif cfg.ar0234: - res_enum = gen.enum([ gen.const("960x600_256_disparities", str_t, "960x600x256", "960x600x256"), - gen.const("1920x1200_64_disparities", str_t, "1920x1200x64", "1920x1200x64"), - 
gen.const("1920x1200_128_disparities", str_t, "1920x1200x128", "1920x1200x128"), - gen.const("1920x1200_256_disparities", str_t, "1920x1200x256", "1920x1200x256") ], - "Available resolution settings") - gen.add("resolution", str_t, 0, "sensor resolution", "960x600x256", edit_method=res_enum) + if cfg.sgm: + # next gen stereo cameras + res_enum = gen.enum([ gen.const("960x600_256_disparities", str_t, "960x600x256", "960x600x256"), + gen.const("1920x1200_64_disparities", str_t, "1920x1200x64", "1920x1200x64"), + gen.const("1920x1200_128_disparities", str_t, "1920x1200x128", "1920x1200x128"), + gen.const("1920x1200_256_disparities", str_t, "1920x1200x256", "1920x1200x256") ], + "Available resolution settings") + gen.add("resolution", str_t, 0, "sensor resolution", "960x600x256", edit_method=res_enum) + else: + # next gen mono cameras have no dispairty settings + res_enum = gen.enum([ gen.const("960x600", str_t, "960x600x64", "960x600x64"), + gen.const("1920x1200", str_t, "1920x1200x64", "1920x1200x64") ], + "Available resolution settings") + gen.add("resolution", str_t, 0, "sensor resolution", "1920x1200x64", edit_method=res_enum) + #endif + gen.add("fps", double_t, 0, "FPS", 10.0, 1.0, 64.0) roi_width = 1920 roi_height = 1200 @@ -151,14 +169,23 @@ for feature_name, feature in SupportedFeatures.__members__.items(): gen.add("crop_offset", int_t, 0, "Crop Offset", 480, 0, 960) #endif - if cfg.name.find('st21_sgm_vga_imu') == -1: + if cfg.name.find('st21_sgm_vga_imu') == -1 and cfg.name.find('remote_head_vpb') == -1: gen.add("gain", double_t, 0, "SensorGain", 1.0, 1.0, 8.0) gen.add("auto_exposure", bool_t, 0, "AutoExposure", True) - gen.add("auto_exposure_max_time", double_t, 0, "AutoExposureMaxTime", 0.5, 0.0, 0.5); + + if cfg.ar0234: + gen.add("auto_exposure_max_time", double_t, 0, "AutoExposureMaxTime", 0.033, 0.0, 0.033); + else: + gen.add("auto_exposure_max_time", double_t, 0, "AutoExposureMaxTime", 0.5, 0.0, 0.5); + gen.add("auto_exposure_decay", int_t, 0, "AutoExposureDecay", 7, 0, 20) gen.add("auto_exposure_thresh", double_t, 0, "AutoExposureThresh", auto_exposure_threshold, 0.0, 1.0) gen.add("auto_exposure_target_intensity", double_t, 0, "AutoExposureTargetIntensity", 0.5, 0.0, 1.0) - gen.add("exposure_time", double_t, 0, "Exposure", 0.025, 0, 0.5) + + if cfg.ar0234: + gen.add("exposure_time", double_t, 0, "Exposure", 0.025, 0, 0.033) + else: + gen.add("exposure_time", double_t, 0, "Exposure", 0.025, 0, 0.5) if not cfg.ar0234: gen.add("auto_white_balance", bool_t, 0, "AutoWhiteBalance", True) @@ -187,11 +214,11 @@ for feature_name, feature in SupportedFeatures.__members__.items(): gen.add("enable_aux_controls", bool_t, 0, "AuxSensorControls", False) gen.add("aux_gain", double_t, 0, "AuxSensorGain", 1.0, 1.0, 8.0) gen.add("aux_auto_exposure", bool_t, 0, "AuxAutoExposure", True) - gen.add("aux_auto_exposure_max_time", double_t, 0, "AuxAutoExposureMaxTime", 0.5, 0.0, 0.5); + gen.add("aux_auto_exposure_max_time", double_t, 0, "AuxAutoExposureMaxTime", 0.033, 0.0, 0.033); gen.add("aux_auto_exposure_decay", int_t, 0, "AuxAutoExposureDecay", 7, 0, 20) gen.add("aux_auto_exposure_thresh", double_t, 0, "AuxAutoExposureThresh", auto_exposure_threshold, 0.0, 1.0) gen.add("aux_auto_exposure_target_intensity", double_t, 0, "AuxAutoExposureTargetIntensity", 0.5, 0.0, 1.0) - gen.add("aux_exposure_time", double_t, 0, "AuxExposure", 0.025, 0, 0.5) + gen.add("aux_exposure_time", double_t, 0, "AuxExposure", 0.025, 0, 0.033) gen.add("aux_roi_auto_exposure", bool_t, 0, "AuxRoiAutoExposure", False) 
gen.add("aux_roi_auto_exposure_x", int_t, 0, "AuxRoiAutoExposureX", 0, 0, 1920) gen.add("aux_roi_auto_exposure_y", int_t, 0, "AuxRoiAutoExposureY", 0, 0, 1188) @@ -216,20 +243,30 @@ for feature_name, feature in SupportedFeatures.__members__.items(): gen.add("network_time_sync", bool_t, 0, "NetworkTimeSynchronization", True); - if cfg.name.find('sgm_AR0234') != -1: + if cfg.ptp: gen.add("ptp_time_sync", bool_t, 0, "PTPTimeSynchronization", False); trigger_source_enum = gen.enum([ gen.const("internal", int_t, 0, ""), - gen.const("ptp", int_t, 3, "")], - "Trigger sources"); + gen.const("ptp", int_t, 3, "")], + "Trigger sources"); gen.add("trigger_source", int_t, 0, "Trigger Source", 0, edit_method=trigger_source_enum) + #endif + + if cfg.ar0234: + if cfg.sgm: + gen.add("detail_disparity_profile", bool_t, 0, "DetailDisparityProfile", False); - gen.add("detail_disparity_profile", bool_t, 0, "DetailDisparityProfile", False); gen.add("high_contrast_profile", bool_t, 0, "HighContrastProfile", False); gen.add("show_roi_profile", bool_t, 0, "ShowRoiProfile", False); - gen.add("full_res_aux_profile", bool_t, 0, "FullResAuxProfile", False); + if cfg.aux: + gen.add("full_res_aux_profile", bool_t, 0, "FullResAuxProfile", False); + #endif if feature == SupportedFeatures.GROUND_SURFACE: gen.add("ground_surface_profile", bool_t, 0, "GroundSurfaceProfile", False); + + if feature == SupportedFeatures.APRILTAG: + gen.add("apriltag_profile", bool_t, 0, "AprilTagProfile", False); + #endif #endif @@ -283,21 +320,23 @@ for feature_name, feature in SupportedFeatures.__members__.items(): gen.add("magnetometer_range", int_t, 0, "Magnetometer Range", 0, edit_method=m_range_enum); #endif - clipping_enum = gen.enum([ gen.const("None", int_t, 0, "No Border Clip"), - gen.const("Rectangular", int_t, 1, "Rectangular Border Clip"), - gen.const("Circular", int_t, 2, "Circular Border Clip")], - "Available border clipping options") - - if "4000" in cfg.name: - clipping_max = 400.0 - else: - clipping_max = 200.0 - #endif + if cfg.sgm or cfg.name.find("sl_bm") != -1: + # clipping is a stereo only feature + clipping_enum = gen.enum([ gen.const("None", int_t, 0, "No Border Clip"), + gen.const("Rectangular", int_t, 1, "Rectangular Border Clip"), + gen.const("Circular", int_t, 2, "Circular Border Clip")], + "Available border clipping options") + + if "4000" in cfg.name: + clipping_max = 400.0 + else: + clipping_max = 200.0 + #endif - gen.add("border_clip_type", int_t, 0, "point cloud border clip type", 0, 0, 2, edit_method=clipping_enum) - gen.add("border_clip_value", double_t, 0, "point cloud border clip value", 0.0, 0.0, clipping_max) + gen.add("border_clip_type", int_t, 0, "point cloud border clip type", 0, 0, 2, edit_method=clipping_enum) + gen.add("border_clip_value", double_t, 0, "point cloud border clip value", 0.0, 0.0, clipping_max) - gen.add("max_point_cloud_range", double_t, 0, "max point cloud range", 15.0, 0.0, 100.0) + gen.add("max_point_cloud_range", double_t, 0, "max point cloud range", 15.0, 0.0, 100.0) gen.add("origin_from_camera_position_x_m", double_t, 0, "Origin from camera extrinsics transform x value (m)", 0.0, -100.0, 100.0) gen.add("origin_from_camera_position_y_m", double_t, 0, "Origin from camera extrinsics transform y value (m)", 0.0, -100.0, 100.0) @@ -331,6 +370,26 @@ for feature_name, feature in SupportedFeatures.__members__.items(): gen.add("ground_surface_spline_draw_resolution", double_t, 0, "Resolution to draw resulting B-Spline model with in RVIZ", 0.1, 0.01, 1.0) #endif + if feature == 
SupportedFeatures.APRILTAG: + apriltag_family_enum = gen.enum([gen.const("tag25h9", str_t, "tag25h9", "tag25h9"), + gen.const("tag16h5", str_t, "tag16h5", "tag16h5"), + gen.const("tag36h10", str_t, "tag36h10", "tag36h10"), + gen.const("tag36h11", str_t, "tag36h11", "tag36h11"), + gen.const("tagCircle21h7", str_t, "tagCircle21h7", "tagCircle21h7"), + gen.const("tagCircle49h12", str_t, "tagCircle49h12", "tagCircle49h12"), + gen.const("tagCustom48h12", str_t, "tagCustom48h12", "tagCustom48h12"), + gen.const("tagStandard41h12", str_t, "tagStandard41h12", "tagStandard41h12"), + gen.const("tagStandard52h13", str_t, "tagStandard52h13", "tagStandard52h13")], + "Available apriltag families") + gen.add("apriltag_family", str_t, 0, "Apriltag family to detect", "tagStandard52h13", edit_method=apriltag_family_enum) + gen.add("apriltag_max_hamming", int_t, 0, "The maximum hamming correction that will be applied while detecting codes (value of 1 causes the detector to pause for a while)", 0, 0, 1) + gen.add("apriltag_quad_detection_blur_sigma", double_t, 0, "Sigma of the Gaussian blur applied to the image before quad_detection, specified in full resolution pixels. Kernel size = 4*sigma, rounded up to the next odd number. (<0.5 results in no blur)", 0.75, 0.0, 5.0) + gen.add("apriltag_quad_detection_decimate", double_t, 0, "Amount to decimate image before detecting quads. Values < 1.0. (0.5 decimation reduces height/width by half)", 1.0, 0.0, 1.0) + gen.add("apriltag_min_border_width", int_t, 0, "Minimum border width that can be considered a valid tag. Used to filter contours based on area and perimeter. Units are in undecimated image pixels.", 5, 0, 10) + gen.add("apriltag_refine_quad_edges", bool_t, 0, "Whether or not to refine the edges before attempting to decode", False) + gen.add("apriltag_decode_sharpening", double_t, 0, "How much to sharpen the quad before sampling the pixels. 
0 to turn off, large values are stronger sharpening", 0.25, 0.0, 1.0) + #endif + # Generate package name based on camera cfg and supported features if feature == SupportedFeatures.NONE: package_name = cfg.name diff --git a/multisense_ros/include/multisense_ros/camera.h b/multisense_ros/include/multisense_ros/camera.h index 9f10c22..79164da 100644 --- a/multisense_ros/include/multisense_ros/camera.h +++ b/multisense_ros/include/multisense_ros/camera.h @@ -77,6 +77,7 @@ class Camera { void colorizeCallback(const crl::multisense::image::Header& header); void groundSurfaceCallback(const crl::multisense::image::Header& header); void groundSurfaceSplineCallback(const crl::multisense::ground_surface::Header& header); + void apriltagCallback(const crl::multisense::apriltag::Header& header); void borderClipChanged(const BorderClip &borderClipType, double borderClipValue); @@ -140,6 +141,7 @@ class Camera { static constexpr char GROUND_SURFACE_IMAGE_TOPIC[] = "image"; static constexpr char GROUND_SURFACE_INFO_TOPIC[] = "camera_info"; static constexpr char GROUND_SURFACE_POINT_SPLINE_TOPIC[] = "spline"; + static constexpr char APRILTAG_DETECTION_TOPIC[] = "apriltag_detections"; // @@ -169,6 +171,7 @@ class Camera { ros::NodeHandle aux_nh_; ros::NodeHandle calibration_nh_; ros::NodeHandle ground_surface_nh_; + ros::NodeHandle apriltag_nh_; // // Image transports @@ -222,6 +225,7 @@ class Camera { ros::Publisher aux_rect_cam_info_pub_; ros::Publisher aux_rgb_rect_cam_info_pub_; ros::Publisher ground_surface_info_pub_; + ros::Publisher apriltag_detection_pub_; ros::Publisher luma_point_cloud_pub_; ros::Publisher color_point_cloud_pub_; @@ -340,11 +344,36 @@ class Camera { std::unordered_map>> image_buffers_; + // + // Has a left camera + + bool has_right_camera_ = false; + + // + // Has a left camera + + bool has_left_camera_ = false; + // // Has a 3rd aux color camera bool has_aux_camera_ = false; + // + // Has color camera streams, either color sensors or a color aux + + bool has_color_ = false; + + // + // Can support the ground surface detector + + bool can_support_ground_surface_ = false; + + // + // Can support the apriltag detector + + bool can_support_apriltag_ = false; + // // Diagnostics diagnostic_updater::Updater diagnostic_updater_; diff --git a/multisense_ros/include/multisense_ros/camera_utilities.h b/multisense_ros/include/multisense_ros/camera_utilities.h index 77e6696..f1ce55e 100644 --- a/multisense_ros/include/multisense_ros/camera_utilities.h +++ b/multisense_ros/include/multisense_ros/camera_utilities.h @@ -166,14 +166,43 @@ class StereoCalibrationManger Eigen::Matrix4d Q() const; /// - /// @brief Translation which transforms points from the right camera frame into the left camera frame + /// @brief Translation which transforms points from the rectified left camera frame into the recified right camera + /// frame /// double T() const; /// - /// @brief Translation which transforms points from the aux camera frame into the left camera frame + /// @brief Translation which transforms points from the rectified left camera frame into the rectified aux + /// camera frame /// - double aux_T() const; + Eigen::Vector3d aux_T() const; + + /// + /// @brief Reproject disparity values into 3D + /// + Eigen::Vector3f reproject(size_t u, size_t v, double d) const; + + /// + /// @brief Reproject disparity values into 3D + /// + Eigen::Vector3f reproject(size_t u, + size_t v, + double d, + const sensor_msgs::CameraInfo &left_camera_info, + const sensor_msgs::CameraInfo &right_camera_info) const; + + 
/// + /// @brief Project points corresponding to disparity measurements in the left rectified image frame into the + /// aux rectified image plane + /// + Eigen::Vector2f rectifiedAuxProject(const Eigen::Vector3f &left_rectified_point) const; + + /// + /// @brief Project points corresponding to disparity measurements in the left rectified image frame into the + /// aux rectified image plane + /// + Eigen::Vector2f rectifiedAuxProject(const Eigen::Vector3f &left_rectified_point, + const sensor_msgs::CameraInfo &aux_camera_info) const; /// /// @brief Get the current main stereo pair operating resolution. This resolution applies for both the mono diff --git a/multisense_ros/include/multisense_ros/reconfigure.h b/multisense_ros/include/multisense_ros/reconfigure.h index 413d314..883a147 100644 --- a/multisense_ros/include/multisense_ros/reconfigure.h +++ b/multisense_ros/include/multisense_ros/reconfigure.h @@ -53,8 +53,16 @@ #include #include #include +#include #include #include +#include +#include +#include +#include +#include +#include + #include #include @@ -89,11 +97,18 @@ class Reconfigure { void callback_st21_vga (multisense_ros::st21_sgm_vga_imuConfig& config, uint32_t level); void callback_mono_cmv2000 (multisense_ros::mono_cmv2000Config& config, uint32_t level); void callback_mono_cmv4000 (multisense_ros::mono_cmv4000Config& config, uint32_t level); - void callback_s27_AR0234 (multisense_ros::s27_sgm_AR0234Config& config, uint32_t level); - void callback_ks21_AR0234 (multisense_ros::ks21_sgm_AR0234Config& config, uint32_t level); - void callback_s27_AR0234_ground_surface (multisense_ros::s27_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level); - void callback_ks21_AR0234_ground_surface (multisense_ros::ks21_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level); + void callback_s27_AR0234 (multisense_ros::s27_sgm_AR0234Config& config, uint32_t level); + void callback_s27_AR0234_ground_surface (multisense_ros::s27_sgm_AR0234_ground_surfaceConfig& config, uint32_t level); + void callback_s27_AR0234_apriltag (multisense_ros::s27_sgm_AR0234_apriltagConfig& config, uint32_t level); + void callback_ks21_AR0234 (multisense_ros::ks21_sgm_AR0234Config& config, uint32_t level); + void callback_ks21_AR0234_ground_surface (multisense_ros::ks21_sgm_AR0234_ground_surfaceConfig& config, uint32_t level); + void callback_ks21_AR0234_apriltag (multisense_ros::ks21_sgm_AR0234_apriltagConfig& config, uint32_t level); + void callback_remote_head_vpb (multisense_ros::remote_head_vpbConfig& config, uint32_t level); + void callback_remote_head_sgm_AR0234 (multisense_ros::remote_head_sgm_AR0234Config& config, uint32_t level); + void callback_remote_head_sgm_AR0234_ground_surface(multisense_ros::remote_head_sgm_AR0234_ground_surfaceConfig& config, uint32_t level); + void callback_remote_head_sgm_AR0234_apriltag (multisense_ros::remote_head_sgm_AR0234_apriltagConfig& config, uint32_t level); + void callback_remote_head_monocam_AR0234 (multisense_ros::remote_head_monocam_AR0234Config& config, uint32_t level); // // Internal helper functions @@ -112,10 +127,14 @@ class Reconfigure { template void configureBorderClip(const T& dyn); template void configurePointCloudRange(const T& dyn); template void configurePtp(const T& dyn); - template void configureStereoProfile(crl::multisense::image::Config &cfg, const T& dyn); - template void configureStereoProfileWithGroundSurface(crl::multisense::image::Config &cfg, const T& dyn); + template void configureStereoProfile(crl::multisense::CameraProfile &profile, const T& 
dyn); + template void configureGroundSurfaceStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn); + template void configureApriltagStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn); + template void configureFullResAuxStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn); + template void configureDetailDisparityStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn); template void configureExtrinsics(const T& dyn); template void configureGroundSurfaceParams(const T& dyn); + template void configureApriltagParams(const T& dyn); // // CRL sensor API @@ -152,10 +171,18 @@ class Reconfigure { std::shared_ptr< dynamic_reconfigure::Server > server_st21_vga_; std::shared_ptr< dynamic_reconfigure::Server > server_mono_cmv2000_; std::shared_ptr< dynamic_reconfigure::Server > server_mono_cmv4000_; - std::shared_ptr< dynamic_reconfigure::Server > server_s27_AR0234_; - std::shared_ptr< dynamic_reconfigure::Server > server_ks21_sgm_AR0234_; + std::shared_ptr< dynamic_reconfigure::Server > server_s27_AR0234_; std::shared_ptr< dynamic_reconfigure::Server > server_s27_AR0234_ground_surface_; + std::shared_ptr< dynamic_reconfigure::Server > server_s27_AR0234_apriltag_; + std::shared_ptr< dynamic_reconfigure::Server > server_ks21_sgm_AR0234_; std::shared_ptr< dynamic_reconfigure::Server > server_ks21_sgm_AR0234_ground_surface_; + std::shared_ptr< dynamic_reconfigure::Server > server_ks21_sgm_AR0234_apriltag_; + + std::shared_ptr< dynamic_reconfigure::Server > server_remote_head_vpb_; + std::shared_ptr< dynamic_reconfigure::Server > server_remote_head_sgm_AR0234_; + std::shared_ptr< dynamic_reconfigure::Server > server_remote_head_sgm_AR0234_ground_surface_; + std::shared_ptr< dynamic_reconfigure::Server > server_remote_head_sgm_AR0234_apriltag_; + std::shared_ptr< dynamic_reconfigure::Server > server_remote_head_monocam_AR0234_; // // Cached values for supported sub-systems (these may be unavailable on @@ -192,7 +219,7 @@ class Reconfigure { std::function extrinsics_callback_; // - // Extrinsics callback to modify pointcloud + // Callback to modify spline drawing parameters std::function spline_draw_parameters_callback_; }; diff --git a/multisense_ros/msg/AprilTagCornerPoint.msg b/multisense_ros/msg/AprilTagCornerPoint.msg new file mode 100644 index 0000000..4e44857 --- /dev/null +++ b/multisense_ros/msg/AprilTagCornerPoint.msg @@ -0,0 +1,3 @@ +# Apriltag corner point in pixels +float64 x +float64 y diff --git a/multisense_ros/msg/AprilTagDetection.msg b/multisense_ros/msg/AprilTagDetection.msg new file mode 100644 index 0000000..98bf884 --- /dev/null +++ b/multisense_ros/msg/AprilTagDetection.msg @@ -0,0 +1,28 @@ +# The family of the tag +string family + +# The ID of the tag +uint32 id + +# The hamming distance between the detection and the real code +uint8 hamming + +# The quality/confidence of the binary decoding process +# average difference between intensity of data bit vs decision thresh. +# Higher is better. 
Only useful for small tags +float32 decisionMargin + +# The 3x3 homography matrix (in row-major order) describing the projection +# from an "ideal" tag (with corners at (-1,1), (1,1), (1,-1), and (-1,-1)) +# to pixels in the image +float64[9] tagToImageHomography + +# The 2D position of the origin of the tag in the image +float64[2] center + +# The 4 tag corner pixel locations in the order: +# point 0: [-squareLength / 2, squareLength / 2] +# point 1: [ squareLength / 2, squareLength / 2] +# point 2: [ squareLength / 2, -squareLength / 2] +# point 3: [-squareLength / 2, -squareLength / 2] +AprilTagCornerPoint[4] corners diff --git a/multisense_ros/msg/AprilTagDetectionArray.msg b/multisense_ros/msg/AprilTagDetectionArray.msg new file mode 100644 index 0000000..a6982d7 --- /dev/null +++ b/multisense_ros/msg/AprilTagDetectionArray.msg @@ -0,0 +1,16 @@ +std_msgs/Header header + +# The frame timestamp (nanoseconds) of the image that the apriltags were detected on +int64 timestamp + +# The image source that the apriltags were detected on +string imageSource + +# Success flag to indicate whether for the apriltag algorithm ran successfully +uint8 success + +# The number of apriltags that were detected +uint32 numDetections + +# The collection of apriltag detections +AprilTagDetection[] detections diff --git a/multisense_ros/src/camera.cpp b/multisense_ros/src/camera.cpp index 22db581..6aa3728 100644 --- a/multisense_ros/src/camera.cpp +++ b/multisense_ros/src/camera.cpp @@ -50,6 +50,10 @@ #include #include #include +#include +#include +#include + using namespace crl::multisense; @@ -114,9 +118,11 @@ void groundSurfaceCB(const image::Header& header, void* userDataP) { reinterpret_cast(userDataP)->groundSurfaceCallback(header); } void groundSurfaceSplineCB(const ground_surface::Header& header, void* userDataP) { reinterpret_cast(userDataP)->groundSurfaceSplineCallback(header); } +void apriltagCB(const apriltag::Header& header, void* userDataP) +{ reinterpret_cast(userDataP)->apriltagCallback(header); } -bool isValidReprojectedPoint(const Eigen::Vector3f& pt, double squared_max_range) +bool isValidReprojectedPoint(const Eigen::Vector3f& pt, float squared_max_range) { return pt[2] > 0.0f && std::isfinite(pt[2]) && pt.squaredNorm() < squared_max_range; } @@ -195,26 +201,38 @@ bool clipPoint(const BorderClip& borderClipType, return true; } -cv::Vec3b u_interpolate_color(double u, double v, const cv::Mat &image) +cv::Vec3b interpolate_color(const Eigen::Vector2f &pixel, const cv::Mat &image) { - const double width = image.cols; + const float width = image.cols; + const float height = image.rows; + + const float &u = pixel(0); + const float &v = pixel(1); // - // Interpolate in just the u dimension + // Implement a basic bileinar interpolation scheme + // https://en.wikipedia.org/wiki/Bilinear_interpolation // - const size_t min_u = static_cast(std::min(std::max(std::floor(u), 0.), width - 1.)); - const size_t max_u = static_cast(std::min(std::max(std::ceil(u), 0.), width - 1.)); + const size_t min_u = static_cast(std::min(std::max(std::floor(u), 0.f), width - 1.f)); + const size_t max_u = static_cast(std::min(std::max(std::floor(u) + 1, 0.f), width - 1.f)); + const size_t min_v = static_cast(std::min(std::max(std::floor(v), 0.f), height - 1.f)); + const size_t max_v = static_cast(std::min(std::max(std::floor(v) + 1, 0.f), height - 1.f)); - const cv::Vec3d element0 = image.at(width * v + min_u); - const cv::Vec3d element1 = image.at(width * v + max_u); + const cv::Vec3d element00 = image.at(width * min_v 
+ min_u); + const cv::Vec3d element01 = image.at(width * min_v + max_u); + const cv::Vec3d element10 = image.at(width * max_v + min_u); + const cv::Vec3d element11 = image.at(width * max_v + max_u); const size_t delta_u = max_u - min_u; + const size_t delta_v = max_u - min_u; const double u_ratio = delta_u == 0 ? 1. : (static_cast(max_u) - u) / static_cast(delta_u); + const double v_ratio = delta_v == 0 ? 1. : (static_cast(max_v) - v) / static_cast(delta_v); - const cv::Vec3b result = (element0 * u_ratio + element1 * (1. - u_ratio)); + const cv::Vec3b f_xy0 = element00 * u_ratio + element01 * (1. - u_ratio); + const cv::Vec3b f_xy1 = element10 * u_ratio + element11 * (1. - u_ratio); - return result; + return (f_xy0 * v_ratio + f_xy1 * (1. - v_ratio)); } } // anonymous @@ -265,6 +283,7 @@ constexpr char Camera::COST_CAMERA_INFO_TOPIC[]; constexpr char Camera::GROUND_SURFACE_IMAGE_TOPIC[]; constexpr char Camera::GROUND_SURFACE_INFO_TOPIC[]; constexpr char Camera::GROUND_SURFACE_POINT_SPLINE_TOPIC[]; +constexpr char Camera::APRILTAG_DETECTION_TOPIC[]; Camera::Camera(Channel* driver, const std::string& tf_prefix) : driver_(driver), @@ -274,6 +293,7 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : aux_nh_(device_nh_, AUX), calibration_nh_(device_nh_, CALIBRATION), ground_surface_nh_(device_nh_, GROUND_SURFACE), + apriltag_nh_(device_nh_, LEFT), left_mono_transport_(left_nh_), right_mono_transport_(right_nh_), left_rect_transport_(left_nh_), @@ -332,9 +352,42 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : // // S27/S30 cameras have a 3rd aux color camera and no left color camera + has_left_camera_ = system::DeviceInfo::HARDWARE_REV_MULTISENSE_SL == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_M == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7S == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S21 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST21 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7AR == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_MONOCAM == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_STEREO == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_MONOCAM == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MONO == device_info_.hardwareRevision; + + has_right_camera_ = system::DeviceInfo::HARDWARE_REV_MULTISENSE_SL == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7S == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S21 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST21 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision || + 
system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7AR == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_STEREO == device_info_.hardwareRevision; + has_aux_camera_ = system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision || system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision; + has_color_ = has_aux_camera_ || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_M == device_info_.hardwareRevision || + system::DeviceInfo::IMAGER_TYPE_CMV2000_COLOR == device_info_.imagerType || + system::DeviceInfo::IMAGER_TYPE_CMV4000_COLOR == device_info_.imagerType || + system::DeviceInfo::IMAGER_TYPE_AR0239_COLOR == device_info_.imagerType; + + // // Topics published for all device types @@ -344,27 +397,44 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : histogram_pub_ = device_nh_.advertise(HISTOGRAM_TOPIC, 5); // - // Create spline-based ground surface publishers for S27/S30 cameras + // Create publishers for algorithms which are available + + std::vector device_modes; + status = driver_->getDeviceModes(device_modes); + if (Status_Ok != status) { + ROS_ERROR("Reconfigure: failed to query device modes: %s", Channel::statusString(status)); + return; + } - const bool can_support_ground_surface = - system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision || - system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision; + const bool can_support_ground_surface_ = + std::any_of(device_modes.begin(), device_modes.end(), [](const auto &mode) { + return (mode.supportedDataSources & Source_Ground_Surface_Spline_Data) && + (mode.supportedDataSources & Source_Ground_Surface_Class_Image); }); - if (can_support_ground_surface) { + if (can_support_ground_surface_) { ground_surface_cam_pub_ = ground_surface_transport_.advertise(GROUND_SURFACE_IMAGE_TOPIC, 5, std::bind(&Camera::connectStream, this, Source_Ground_Surface_Class_Image), std::bind(&Camera::disconnectStream, this, Source_Ground_Surface_Class_Image)); - } - ground_surface_info_pub_ = ground_surface_nh_.advertise(GROUND_SURFACE_INFO_TOPIC, 1, true); + ground_surface_info_pub_ = ground_surface_nh_.advertise(GROUND_SURFACE_INFO_TOPIC, 1, true); + + ground_surface_spline_pub_ = ground_surface_nh_.advertise(GROUND_SURFACE_POINT_SPLINE_TOPIC, 5, true); + } - ground_surface_spline_pub_ = ground_surface_nh_.advertise(GROUND_SURFACE_POINT_SPLINE_TOPIC, 5, true); + const bool can_support_apriltag_ = + std::any_of(device_modes.begin(), device_modes.end(), [](const auto &mode) { + return mode.supportedDataSources & Source_AprilTag_Detections; }); - // - // Create topic publishers (TODO: color topics should not be advertised if the device can't support it) + if (can_support_apriltag_) { + apriltag_detection_pub_ = apriltag_nh_.advertise(APRILTAG_DETECTION_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_AprilTag_Detections), + std::bind(&Camera::disconnectStream, this, Source_AprilTag_Detections)); + } if (system::DeviceInfo::HARDWARE_REV_BCAM == device_info_.hardwareRevision) { + // bcam has unique topics compared to all other S* variants + left_mono_cam_pub_ = left_mono_transport_.advertise(MONO_TOPIC, 5, std::bind(&Camera::connectStream, this, Source_Luma_Left), std::bind(&Camera::disconnectStream, this, Source_Luma_Left)); @@ -381,156 +451,160 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : 
left_rgb_cam_info_pub_ = left_nh_.advertise(COLOR_CAMERA_INFO_TOPIC, 1, true); left_rgb_rect_cam_info_pub_ = left_nh_.advertise(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true); + } else { - } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_M == device_info_.hardwareRevision) { + // all other MultiSense-S* variations - // monocular variation + if (has_left_camera_) { - left_mono_cam_pub_ = left_mono_transport_.advertise(MONO_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Left)); - left_rect_cam_pub_ = left_rect_transport_.advertiseCamera(RECT_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left)); - left_rgb_cam_pub_ = left_rgb_transport_.advertise(COLOR_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left)); - left_rgb_rect_cam_pub_ = left_rgb_rect_transport_.advertiseCamera(RECT_COLOR_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left)); + left_mono_cam_pub_ = left_mono_transport_.advertise(MONO_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Left), + std::bind(&Camera::disconnectStream, this, Source_Luma_Left)); - left_mono_cam_info_pub_ = left_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); - left_rect_cam_info_pub_ = left_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); - left_rgb_cam_info_pub_ = left_nh_.advertise(COLOR_CAMERA_INFO_TOPIC, 1, true); - left_rgb_rect_cam_info_pub_ = left_nh_.advertise(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true); + left_mono_cam_info_pub_ = left_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); - } else { // all other MultiSense-S* variations + left_rect_cam_pub_ = left_rect_transport_.advertiseCamera(RECT_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left)); + left_rect_cam_info_pub_ = left_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); + } - left_mono_cam_pub_ = left_mono_transport_.advertise(MONO_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Left)); - right_mono_cam_pub_ = right_mono_transport_.advertise(MONO_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Right), - std::bind(&Camera::disconnectStream, this, Source_Luma_Right)); - left_rect_cam_pub_ = left_rect_transport_.advertiseCamera(RECT_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left)); - right_rect_cam_pub_ = right_rect_transport_.advertiseCamera(RECT_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Right), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Right)); - depth_cam_pub_ = depth_transport_.advertise(DEPTH_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Disparity)); - ni_depth_cam_pub_ = ni_depth_transport_.advertise(OPENNI_DEPTH_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Disparity)); - - if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST21 != 
device_info_.hardwareRevision) { + if (has_right_camera_) { - if (has_aux_camera_) { + right_mono_cam_pub_ = right_mono_transport_.advertise(MONO_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Right), + std::bind(&Camera::disconnectStream, this, Source_Luma_Right)); + + right_mono_cam_info_pub_ = right_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); - aux_mono_cam_pub_ = aux_mono_transport_.advertise(MONO_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Aux), - std::bind(&Camera::disconnectStream, this, Source_Luma_Aux)); + right_rect_cam_pub_ = right_rect_transport_.advertiseCamera(RECT_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Right), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Right)); - aux_mono_cam_info_pub_ = aux_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); + right_rect_cam_info_pub_ = right_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); + } - aux_rgb_cam_pub_ = aux_rgb_transport_.advertise(COLOR_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Aux | Source_Chroma_Aux), - std::bind(&Camera::disconnectStream, this, Source_Luma_Aux | Source_Chroma_Aux)); + // only publish for stereo cameras + if (has_left_camera_ && has_right_camera_) { - aux_rgb_cam_info_pub_ = aux_nh_.advertise(COLOR_CAMERA_INFO_TOPIC, 1, true); + // run the stereo topics if left and right cameras both exist - aux_rect_cam_pub_ = aux_rect_transport_.advertiseCamera(RECT_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Aux), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Aux)); + depth_cam_pub_ = depth_transport_.advertise(DEPTH_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Disparity)); - aux_rect_cam_info_pub_ = aux_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); + ni_depth_cam_pub_ = ni_depth_transport_.advertise(OPENNI_DEPTH_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Disparity)); - aux_rgb_rect_cam_pub_ = aux_rgb_rect_transport_.advertiseCamera(RECT_COLOR_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux)); + depth_cam_info_pub_ = device_nh_.advertise(DEPTH_CAMERA_INFO_TOPIC, 1, true); - aux_rgb_rect_cam_info_pub_ = aux_nh_.advertise(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true); - } - else { + luma_point_cloud_pub_ = device_nh_.advertise(POINTCLOUD_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity)); - left_rgb_cam_pub_ = left_rgb_transport_.advertise(COLOR_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left)); + luma_organized_point_cloud_pub_ = device_nh_.advertise(ORGANIZED_POINTCLOUD_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity)); - left_rgb_cam_info_pub_ = left_nh_.advertise(COLOR_CAMERA_INFO_TOPIC, 1, true); - left_rgb_rect_cam_info_pub_ = left_nh_.advertise(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true); + raw_cam_data_pub_ = 
calibration_nh_.advertise(RAW_CAM_DATA_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity)); - left_rgb_rect_cam_pub_ = left_rgb_rect_transport_.advertiseCamera(RECT_COLOR_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left)); + left_disparity_pub_ = disparity_left_transport_.advertise(DISPARITY_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Disparity)); + left_disp_cam_info_pub_ = left_nh_.advertise(DISPARITY_CAMERA_INFO_TOPIC, 1, true); + + left_stereo_disparity_pub_ = left_nh_.advertise(DISPARITY_IMAGE_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Disparity)); + + if (version_info_.sensorFirmwareVersion >= 0x0300) { + + right_disparity_pub_ = disparity_right_transport_.advertise(DISPARITY_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity_Right), + std::bind(&Camera::disconnectStream, this, Source_Disparity_Right)); + + right_disp_cam_info_pub_ = right_nh_.advertise(DISPARITY_CAMERA_INFO_TOPIC, 1, true); + + right_stereo_disparity_pub_ = right_nh_.advertise(DISPARITY_IMAGE_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity_Right), + std::bind(&Camera::disconnectStream, this, Source_Disparity_Right)); + + left_disparity_cost_pub_ = disparity_cost_transport_.advertise(COST_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity_Cost), + std::bind(&Camera::disconnectStream, this, Source_Disparity_Cost)); + + left_cost_cam_info_pub_ = left_nh_.advertise(COST_CAMERA_INFO_TOPIC, 1, true); } - const auto point_cloud_color_topics = has_aux_camera_ ? Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux : - Source_Luma_Left | Source_Chroma_Left; + // publish colorized stereo topics + if (has_aux_camera_) { + + const auto point_cloud_color_topics = has_aux_camera_ ? 
Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux : + Source_Luma_Left | Source_Chroma_Left; - color_point_cloud_pub_ = device_nh_.advertise(COLOR_POINTCLOUD_TOPIC, 5, - std::bind(&Camera::connectStream, this, - Source_Disparity | point_cloud_color_topics), - std::bind(&Camera::disconnectStream, this, - Source_Disparity | point_cloud_color_topics)); - color_organized_point_cloud_pub_ = device_nh_.advertise(COLOR_ORGANIZED_POINTCLOUD_TOPIC, 5, - std::bind(&Camera::connectStream, this, - Source_Disparity | point_cloud_color_topics), - std::bind(&Camera::disconnectStream, this, - Source_Disparity | point_cloud_color_topics)); + color_point_cloud_pub_ = device_nh_.advertise(COLOR_POINTCLOUD_TOPIC, 5, + std::bind(&Camera::connectStream, this, + Source_Disparity | point_cloud_color_topics), + std::bind(&Camera::disconnectStream, this, + Source_Disparity | point_cloud_color_topics)); + + color_organized_point_cloud_pub_ = device_nh_.advertise(COLOR_ORGANIZED_POINTCLOUD_TOPIC, 5, + std::bind(&Camera::connectStream, this, + Source_Disparity | point_cloud_color_topics), + std::bind(&Camera::disconnectStream, this, + Source_Disparity | point_cloud_color_topics)); + + } } - luma_point_cloud_pub_ = device_nh_.advertise(POINTCLOUD_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity)); + // handle color topic publishing if color data exists + if (has_aux_camera_) { - luma_organized_point_cloud_pub_ = device_nh_.advertise(ORGANIZED_POINTCLOUD_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity)); + aux_mono_cam_pub_ = aux_mono_transport_.advertise(MONO_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Aux), + std::bind(&Camera::disconnectStream, this, Source_Luma_Aux)); - raw_cam_data_pub_ = calibration_nh_.advertise(RAW_CAM_DATA_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity)); + aux_mono_cam_info_pub_ = aux_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); + + aux_rgb_cam_pub_ = aux_rgb_transport_.advertise(COLOR_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Aux | Source_Chroma_Aux), + std::bind(&Camera::disconnectStream, this, Source_Luma_Aux | Source_Chroma_Aux)); + + aux_rgb_cam_info_pub_ = aux_nh_.advertise(COLOR_CAMERA_INFO_TOPIC, 1, true); - left_disparity_pub_ = disparity_left_transport_.advertise(DISPARITY_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Disparity)); + aux_rect_cam_pub_ = aux_rect_transport_.advertiseCamera(RECT_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Aux), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Aux)); - left_stereo_disparity_pub_ = left_nh_.advertise(DISPARITY_IMAGE_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Disparity)); + aux_rect_cam_info_pub_ = aux_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); - if (version_info_.sensorFirmwareVersion >= 0x0300) { + aux_rgb_rect_cam_pub_ = aux_rgb_rect_transport_.advertiseCamera(RECT_COLOR_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Aux | 
Source_Chroma_Rectified_Aux), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux)); - right_disparity_pub_ = disparity_right_transport_.advertise(DISPARITY_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity_Right), - std::bind(&Camera::disconnectStream, this, Source_Disparity_Right)); + aux_rgb_rect_cam_info_pub_ = aux_nh_.advertise(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true); - right_stereo_disparity_pub_ = right_nh_.advertise(DISPARITY_IMAGE_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity_Right), - std::bind(&Camera::disconnectStream, this, Source_Disparity_Right)); + } else if (has_color_ && !has_aux_camera_) { // !has_aux_camera_ is redundant, but leaving to prevent confusion over edge cases - left_disparity_cost_pub_ = disparity_cost_transport_.advertise(COST_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity_Cost), - std::bind(&Camera::disconnectStream, this, Source_Disparity_Cost)); + left_rgb_cam_pub_ = left_rgb_transport_.advertise(COLOR_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left), + std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left)); + + left_rgb_rect_cam_pub_ = left_rgb_rect_transport_.advertiseCamera(RECT_COLOR_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left), + std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left)); + + left_rgb_cam_info_pub_ = left_nh_.advertise(COLOR_CAMERA_INFO_TOPIC, 1, true); + left_rgb_rect_cam_info_pub_ = left_nh_.advertise(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true); - right_disp_cam_info_pub_ = right_nh_.advertise(DISPARITY_CAMERA_INFO_TOPIC, 1, true); - left_cost_cam_info_pub_ = left_nh_.advertise(COST_CAMERA_INFO_TOPIC, 1, true); } - // - // Camera info topic publishers - left_mono_cam_info_pub_ = left_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); - right_mono_cam_info_pub_ = right_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); - left_rect_cam_info_pub_ = left_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); - right_rect_cam_info_pub_ = right_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); - left_disp_cam_info_pub_ = left_nh_.advertise(DISPARITY_CAMERA_INFO_TOPIC, 1, true); - depth_cam_info_pub_ = device_nh_.advertise(DEPTH_CAMERA_INFO_TOPIC, 1, true); } // @@ -589,35 +663,56 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : // Publish image calibration image::Calibration image_calibration; - status = driver_->getImageCalibration(image_calibration); - if (Status_Ok != status) { - ROS_ERROR("Camera: failed to query image calibration: %s", - Channel::statusString(status)); - } - else { + if (has_left_camera_ || has_right_camera_ || has_aux_camera_) + { + status = driver_->getImageCalibration(image_calibration); + if (Status_Ok != status) { + ROS_ERROR("Camera: failed to query image calibration: %s", + Channel::statusString(status)); + } + else { + // currently no ROS cameras have only an aux camera + // HARDWARE_REV_MULTISENSE_MONOCAM uses the left cal for its calibration + + multisense_ros::RawCamCal cal; + const float *cP; + + cP = reinterpret_cast(&(image_calibration.left.M[0][0])); + for(uint32_t i=0; i<9; i++) cal.left_M[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.left.D[0])); + for(uint32_t i=0; i<8; i++) cal.left_D[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.left.R[0][0])); + for(uint32_t i=0; i<9; i++) cal.left_R[i] = cP[i]; + cP = 
reinterpret_cast(&(image_calibration.left.P[0][0])); + for(uint32_t i=0; i<12; i++) cal.left_P[i] = cP[i]; + + if (not has_right_camera_) { + + // publish the left calibration twice if there is no right camera + cP = reinterpret_cast(&(image_calibration.left.M[0][0])); + for(uint32_t i=0; i<9; i++) cal.right_M[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.left.D[0])); + for(uint32_t i=0; i<8; i++) cal.right_D[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.left.R[0][0])); + for(uint32_t i=0; i<9; i++) cal.right_R[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.left.P[0][0])); + for(uint32_t i=0; i<12; i++) cal.right_P[i] = cP[i]; - multisense_ros::RawCamCal cal; - const float *cP; + } else { - cP = reinterpret_cast(&(image_calibration.left.M[0][0])); - for(uint32_t i=0; i<9; i++) cal.left_M[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.left.D[0])); - for(uint32_t i=0; i<8; i++) cal.left_D[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.left.R[0][0])); - for(uint32_t i=0; i<9; i++) cal.left_R[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.left.P[0][0])); - for(uint32_t i=0; i<12; i++) cal.left_P[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.right.M[0][0])); + for(uint32_t i=0; i<9; i++) cal.right_M[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.right.D[0])); + for(uint32_t i=0; i<8; i++) cal.right_D[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.right.R[0][0])); + for(uint32_t i=0; i<9; i++) cal.right_R[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.right.P[0][0])); + for(uint32_t i=0; i<12; i++) cal.right_P[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.right.M[0][0])); - for(uint32_t i=0; i<9; i++) cal.right_M[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.right.D[0])); - for(uint32_t i=0; i<8; i++) cal.right_D[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.right.R[0][0])); - for(uint32_t i=0; i<9; i++) cal.right_R[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.right.P[0][0])); - for(uint32_t i=0; i<12; i++) cal.right_P[i] = cP[i]; + } - raw_cam_cal_pub_.publish(cal); + raw_cam_cal_pub_.publish(cal); + } } stereo_calibration_manager_ = std::make_shared(image_config, image_calibration, device_info_); @@ -655,8 +750,10 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : if (has_aux_extrinsics) { + const Eigen::Vector3d aux_T = stereo_calibration_manager_->aux_T(); + tf2::Transform rectified_aux_T_rectified_left{tf2::Matrix3x3::getIdentity(), - tf2::Vector3{stereo_calibration_manager_->aux_T(), 0., 0.}}; + tf2::Vector3{aux_T(0), aux_T(1), aux_T(2)}}; stamped_transforms[3].header.stamp = ros::Time::now(); stamped_transforms[3].header.frame_id = frame_id_rectified_left_; stamped_transforms[3].child_frame_id = frame_id_rectified_aux_; @@ -672,7 +769,7 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : static_tf_broadcaster_.sendTransform(stamped_transforms); // - // Update our internal image config and publish intitial camera info + // Update our internal image config and publish initial camera info updateConfig(image_config); @@ -696,17 +793,37 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : driver_->addIsolatedCallback(monoCB, Source_Luma_Left, this); driver_->addIsolatedCallback(jpegCB, Source_Jpeg_Left, this); + } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_VPB == device_info_.hardwareRevision) { + // no incoming image data from vpb + } else if 
(system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_MONOCAM == device_info_.hardwareRevision) { + + driver_->addIsolatedCallback(monoCB, Source_Luma_Left, this); + driver_->addIsolatedCallback(rectCB, Source_Luma_Rectified_Left, this); + } else { - driver_->addIsolatedCallback(colorizeCB, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux | Source_Luma_Aux | - Source_Luma_Left | Source_Chroma_Left | Source_Luma_Rectified_Left, this); - driver_->addIsolatedCallback(monoCB, Source_Luma_Left | Source_Luma_Right | Source_Luma_Aux, this); - driver_->addIsolatedCallback(rectCB, Source_Luma_Rectified_Left | Source_Luma_Rectified_Right | Source_Luma_Rectified_Aux, this); - driver_->addIsolatedCallback(depthCB, Source_Disparity, this); - driver_->addIsolatedCallback(pointCB, Source_Disparity, this); - driver_->addIsolatedCallback(rawCB, Source_Disparity | Source_Luma_Rectified_Left, this); - driver_->addIsolatedCallback(colorCB, Source_Chroma_Left | Source_Chroma_Rectified_Aux | Source_Chroma_Aux, this); - driver_->addIsolatedCallback(dispCB, Source_Disparity | Source_Disparity_Right | Source_Disparity_Cost, this); + if (has_left_camera_ || has_right_camera_ || has_aux_camera_) { + driver_->addIsolatedCallback(monoCB, Source_Luma_Left | Source_Luma_Right | Source_Luma_Aux, this); + driver_->addIsolatedCallback(rectCB, Source_Luma_Rectified_Left | Source_Luma_Rectified_Right | Source_Luma_Rectified_Aux, this); + } + + if (has_color_) { + if (has_aux_camera_) { + driver_->addIsolatedCallback(colorizeCB, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux | Source_Luma_Aux | + Source_Luma_Left | Source_Luma_Rectified_Left, this); + driver_->addIsolatedCallback(colorCB, Source_Chroma_Rectified_Aux | Source_Chroma_Aux, this); + } else { + driver_->addIsolatedCallback(colorizeCB, Source_Luma_Left | Source_Chroma_Left | Source_Luma_Rectified_Left, this); + driver_->addIsolatedCallback(colorCB, Source_Chroma_Left, this); + } + } + + if (has_left_camera_ && has_right_camera_) { + driver_->addIsolatedCallback(depthCB, Source_Disparity, this); + driver_->addIsolatedCallback(pointCB, Source_Disparity, this); + driver_->addIsolatedCallback(rawCB, Source_Disparity | Source_Luma_Rectified_Left, this); + driver_->addIsolatedCallback(dispCB, Source_Disparity | Source_Disparity_Right | Source_Disparity_Cost, this); + } } // @@ -715,13 +832,17 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : driver_->addIsolatedCallback(histCB, allImageSources, this); // - // Add ground surface callbacks for S27/S30 cameras + // Add algorithm callbacks - if (can_support_ground_surface) { + if (can_support_ground_surface_) { driver_->addIsolatedCallback(groundSurfaceCB, Source_Ground_Surface_Class_Image, this); driver_->addIsolatedCallback(groundSurfaceSplineCB, this); } + if (can_support_apriltag_) { + driver_->addIsolatedCallback(apriltagCB, this); + } + // // Disable color point cloud strict frame syncing, if desired @@ -749,18 +870,34 @@ Camera::~Camera() } else { - driver_->removeIsolatedCallback(colorizeCB); - driver_->removeIsolatedCallback(monoCB); - driver_->removeIsolatedCallback(rectCB); - driver_->removeIsolatedCallback(depthCB); - driver_->removeIsolatedCallback(pointCB); - driver_->removeIsolatedCallback(rawCB); - driver_->removeIsolatedCallback(colorCB); - driver_->removeIsolatedCallback(dispCB); + if (has_left_camera_ || has_right_camera_ || has_aux_camera_) { + driver_->removeIsolatedCallback(monoCB); + driver_->removeIsolatedCallback(rectCB); + } + + if (has_color_) { + 
driver_->removeIsolatedCallback(colorizeCB); + driver_->removeIsolatedCallback(colorCB); + } + + if (has_left_camera_ && has_right_camera_) { + driver_->removeIsolatedCallback(depthCB); + driver_->removeIsolatedCallback(pointCB); + driver_->removeIsolatedCallback(rawCB); + driver_->removeIsolatedCallback(dispCB); + } + } + + if (can_support_ground_surface_) + { + driver_->removeIsolatedCallback(groundSurfaceCB); + driver_->removeIsolatedCallback(groundSurfaceSplineCB); } - driver_->removeIsolatedCallback(groundSurfaceCB); - driver_->removeIsolatedCallback(groundSurfaceSplineCB); + if (can_support_apriltag_) + { + driver_->removeIsolatedCallback(apriltagCB); + } } void Camera::borderClipChanged(const BorderClip &borderClipType, double borderClipValue) @@ -1542,8 +1679,6 @@ void Camera::pointCloudCallback(const image::Header& header) color_organized_point_cloud_.row_step = header.width * color_organized_point_cloud_.point_step; } - const Eigen::Matrix4d Q = stereo_calibration_manager_->Q(); - const Eigen::Vector3f invalid_point(std::numeric_limits::quiet_NaN(), std::numeric_limits::quiet_NaN(), std::numeric_limits::quiet_NaN()); @@ -1598,15 +1733,17 @@ void Camera::pointCloudCallback(const image::Header& header) rectified_color = std::move(rect_rgb_image); } + const auto left_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_left_, t); + const auto right_camera_info = stereo_calibration_manager_->rightCameraInfo(frame_id_right_, t); + const auto aux_camera_info = stereo_calibration_manager_->auxCameraInfo(frame_id_rectified_aux_, t, + rectified_color.cols, rectified_color.rows); + // // Iterate through our disparity image once populating our pointcloud structures if we plan to publish them uint32_t packed_color = 0; - const double squared_max_range = pointcloud_max_range_ * pointcloud_max_range_; - - const double aux_T = has_aux_camera_ ? stereo_calibration_manager_->aux_T() : stereo_calibration_manager_->T(); - const double T = stereo_calibration_manager_->T(); + const float squared_max_range = pointcloud_max_range_ * pointcloud_max_range_; size_t valid_points = 0; for (size_t y = 0 ; y < header.height ; ++y) @@ -1615,17 +1752,17 @@ void Camera::pointCloudCallback(const image::Header& header) { const size_t index = y * header.width + x; - double disparity = 0.0f; + float disparity = 0.0f; switch(header.bitsPerPixel) { case 16: { - disparity = static_cast(reinterpret_cast(header.imageDataP)[index]) / 16.0f; + disparity = static_cast(reinterpret_cast(header.imageDataP)[index]) / 16.0f; break; } case 32: { - disparity = static_cast(reinterpret_cast(header.imageDataP)[index]); + disparity = static_cast(reinterpret_cast(header.imageDataP)[index]); break; } default: @@ -1635,6 +1772,9 @@ void Camera::pointCloudCallback(const image::Header& header) } } + const Eigen::Vector3f point = stereo_calibration_manager_->reproject(x, y, disparity, + left_camera_info, right_camera_info); + // // We have a valid rectified color image meaning we plan to publish color pointcloud topics. Assemble the // color pixel here since it may be needed in our organized pointclouds @@ -1643,10 +1783,11 @@ void Camera::pointCloudCallback(const image::Header& header) { packed_color = 0; - const double color_d = has_aux_camera_ ? (disparity * aux_T) / T : 0.0; + const auto color_pixel = (has_aux_camera_ && disparity != 0.0) ? 
+ interpolate_color(stereo_calibration_manager_->rectifiedAuxProject(point, aux_camera_info), + rectified_color) : + rectified_color.at(y, x); - const auto color_pixel = has_aux_camera_ ? u_interpolate_color(std::max(x - color_d, 0.), y, rectified_color) : - rectified_color.at(y, x); packed_color |= color_pixel[2] << 16 | color_pixel[1] << 8 | color_pixel[0]; } @@ -1655,7 +1796,7 @@ void Camera::pointCloudCallback(const image::Header& header) // If our disparity is 0 pixels our corresponding 3D point is infinite. If we plan to publish organized // pointclouds we will need to add a invalid point to our pointcloud(s) - if (disparity == 0.0f || clipPoint(border_clip_type_, border_clip_value_, header.width, header.height, x, y)) + if (disparity == 0.0 || clipPoint(border_clip_type_, border_clip_value_, header.width, header.height, x, y)) { if (pub_organized_pointcloud) { @@ -1670,12 +1811,6 @@ void Camera::pointCloudCallback(const image::Header& header) continue; } - const Eigen::Vector3f point = ((Q * Eigen::Vector4d(static_cast(x), - static_cast(y), - disparity, - 1.0)).hnormalized()).cast(); - - const bool valid = isValidReprojectedPoint(point, squared_max_range); if (pub_pointcloud && valid) @@ -2080,6 +2215,61 @@ void Camera::groundSurfaceSplineCallback(const ground_surface::Header& header) ground_surface_spline_pub_.publish(ground_surface_utilities::eigenToPointcloud(eigen_pcl, frame_id_origin_)); } +void Camera::apriltagCallback(const apriltag::Header& header) +{ + // Convert camera timestamp to ROS timestamp + int64_t seconds = std::floor(header.timestamp / 1e9); + int64_t nanoseconds = header.timestamp - seconds * 1e9; + const ros::Time ros_time(static_cast(seconds), static_cast(nanoseconds)); + + // Publish Apriltags as ROS message + AprilTagDetectionArray tag_detection_array; + + tag_detection_array.header.seq = static_cast(header.frameId); + tag_detection_array.header.stamp = ros_time; + tag_detection_array.header.frame_id = frame_id_rectified_left_; + tag_detection_array.imageSource = header.imageSource; + tag_detection_array.timestamp = header.timestamp; + tag_detection_array.success = header.success; + tag_detection_array.numDetections = header.numDetections; + + for (auto &d : header.detections) + { + AprilTagDetection tag_detection; + + tag_detection.family = d.family; + tag_detection.id = d.id; + tag_detection.hamming = d.hamming; + tag_detection.decisionMargin = d.decisionMargin; + + for (unsigned int col = 0; col < 3; col++) + { + for (unsigned int row = 0; row < 3; row++) + { + tag_detection.tagToImageHomography[row + col * 3] = d.tagToImageHomography[row][col]; + } + } + + for (unsigned int i = 0; i < 2; i++) + { + tag_detection.center[i] = d.center[i]; + } + + for (unsigned int i = 0; i < 4; i++) + { + AprilTagCornerPoint point; + point.x = d.corners[i][0]; + point.y = d.corners[i][1]; + + tag_detection.corners[i] = point; + } + + tag_detection_array.detections.push_back(tag_detection); + } + + apriltag_detection_pub_.publish(tag_detection_array); +} + void Camera::updateConfig(const image::Config& config) { stereo_calibration_manager_->updateConfig(config); @@ -2146,36 +2336,30 @@ void Camera::publishAllCameraInfo() } else { // all other MultiSense-S* variations - if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST21 != device_info_.hardwareRevision) { + if (has_left_camera_) { + left_mono_cam_info_pub_.publish(left_camera_info); + left_rect_cam_info_pub_.publish(left_rectified_camera_info); + } - if (has_aux_camera_) { + if (has_right_camera_) { + 
right_mono_cam_info_pub_.publish(right_camera_info); + right_rect_cam_info_pub_.publish(right_rectified_camera_info); + } - aux_mono_cam_info_pub_.publish(left_camera_info); - aux_rgb_cam_info_pub_.publish(left_camera_info); - aux_rect_cam_info_pub_.publish(left_rectified_camera_info); - aux_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info); + // only publish for stereo cameras + if (has_left_camera_ && has_right_camera_) { - } else { + left_disp_cam_info_pub_.publish(left_rectified_camera_info); + depth_cam_info_pub_.publish(left_rectified_camera_info); - left_rgb_cam_info_pub_.publish(left_camera_info); - left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info); + if (version_info_.sensorFirmwareVersion >= 0x0300) { + right_disp_cam_info_pub_.publish(right_rectified_camera_info); + left_cost_cam_info_pub_.publish(left_rectified_camera_info); } } - if (version_info_.sensorFirmwareVersion >= 0x0300) { - - right_disp_cam_info_pub_.publish(right_rectified_camera_info); - left_cost_cam_info_pub_.publish(left_rectified_camera_info); - } - - left_mono_cam_info_pub_.publish(left_camera_info); - left_rect_cam_info_pub_.publish(left_rectified_camera_info); - right_mono_cam_info_pub_.publish(right_camera_info); - right_rect_cam_info_pub_.publish(right_rectified_camera_info); - left_disp_cam_info_pub_.publish(left_rectified_camera_info); - depth_cam_info_pub_.publish(left_rectified_camera_info); - + // handle color topic publishing if color data exists if (has_aux_camera_) { // @@ -2190,6 +2374,12 @@ void Camera::publishAllCameraInfo() aux_rgb_cam_info_pub_.publish(stereo_calibration_manager_->auxCameraInfo(frame_id_aux_, stamp, auxResolution)); aux_rgb_rect_cam_info_pub_.publish(stereo_calibration_manager_->auxCameraInfo(frame_id_rectified_aux_, stamp, stereoResolution)); + + } else if (has_color_ && !has_aux_camera_) { // !has_aux_camera_ is redundant, but leaving to prevent confusion over edge cases + + left_rgb_cam_info_pub_.publish(left_camera_info); + left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info); + } } } diff --git a/multisense_ros/src/camera_utilities.cpp b/multisense_ros/src/camera_utilities.cpp index d78815f..65e80b3 100644 --- a/multisense_ros/src/camera_utilities.cpp +++ b/multisense_ros/src/camera_utilities.cpp @@ -334,20 +334,86 @@ double StereoCalibrationManger::T() const return right_camera_info_.P[3] / right_camera_info_.P[0]; } -double StereoCalibrationManger::aux_T() const +Eigen::Vector3d StereoCalibrationManger::aux_T() const { std::lock_guard lock(mutex_); // // The aux camera projection matrix is of the form: // - // [fx, 0, cx, t * fx] - // [ 0, fy, cy, 0 ] - // [ 0, 0, 1, 0 ] + // [fx, 0, cx, tx * fx] + // [ 0, fy, cy, ty * fy] + // [ 0, 0, 1, tz ] // // divide the t * fx term by fx to get t - return aux_camera_info_.P[3] / aux_camera_info_.P[0]; + return Eigen::Vector3d{aux_camera_info_.P[3] / aux_camera_info_.P[0], + aux_camera_info_.P[7] / aux_camera_info_.P[5], + aux_camera_info_.P[11]}; +} + +Eigen::Vector3f StereoCalibrationManger::reproject(size_t u, size_t v, double d) const +{ + std::lock_guard lock(mutex_); + + return reproject(u, v, d, left_camera_info_, right_camera_info_); +} + +Eigen::Vector3f StereoCalibrationManger::reproject(size_t u, + size_t v, + double d, + const sensor_msgs::CameraInfo &left_camera_info, + const sensor_msgs::CameraInfo &right_camera_info) const +{ + if (d == 0.0) + { + return Eigen::Vector3f{std::numeric_limits::max(), + std::numeric_limits::max(), + std::numeric_limits::max()}; + } + + + const double 
&fx = left_camera_info.P[0];
+    const double &fy = left_camera_info.P[5];
+    const double &cx = left_camera_info.P[2];
+    const double &cy = left_camera_info.P[6];
+    const double &cx_right = right_camera_info.P[2];
+    const double tx = right_camera_info.P[3] / right_camera_info.P[0];
+
+    const double xB = ((fy * tx) * u) + (-fy * cx * tx);
+    const double yB = ((fx * tx) * v) + (-fx * cy * tx);
+    const double zB = (fx * fy * tx);
+    const double invB = 1. / ((-fy * d) + (fy * (cx - cx_right)));
+
+    return Eigen::Vector3f{static_cast<float>(xB * invB), static_cast<float>(yB * invB), static_cast<float>(zB * invB)};
+}
+
+Eigen::Vector2f StereoCalibrationManger::rectifiedAuxProject(const Eigen::Vector3f &left_rectified_point) const
+{
+    std::lock_guard<std::mutex> lock(mutex_);
+
+    return rectifiedAuxProject(left_rectified_point, aux_camera_info_);
+}
+
+Eigen::Vector2f StereoCalibrationManger::rectifiedAuxProject(const Eigen::Vector3f &left_rectified_point,
+                                                             const sensor_msgs::CameraInfo &aux_camera_info) const
+{
+    const double &fx = aux_camera_info.P[0];
+    const double &fy = aux_camera_info.P[5];
+    const double &cx = aux_camera_info.P[2];
+    const double &cy = aux_camera_info.P[6];
+    const double &fxtx = aux_camera_info.P[3];
+    const double &fyty = aux_camera_info.P[7];
+    const double &tz = aux_camera_info.P[11];
+
+    //
+    // Project the left_rectified_point into the aux image using the aux P matrix
+    //
+    const double uB = (fx * left_rectified_point(0) + (cx * left_rectified_point(2)) + fxtx);
+    const double vB = (fy * left_rectified_point(1) + (cy * left_rectified_point(2)) + fyty);
+    const double invB = 1.0 / (left_rectified_point(2) + tz);
+
+    return Eigen::Vector2f{static_cast<float>(uB * invB), static_cast<float>(vB * invB)};
 }
 
 OperatingResolutionT StereoCalibrationManger::operatingStereoResolution() const
@@ -374,7 +440,8 @@ OperatingResolutionT StereoCalibrationManger::operatingAuxResolution() const
 
 bool StereoCalibrationManger::validAux() const
 {
-    return std::isfinite(aux_T());
+    const Eigen::Vector3d auxT = aux_T();
+    return std::isfinite(auxT(0)) && std::isfinite(auxT(1)) && std::isfinite(auxT(2));
 }
 
 sensor_msgs::CameraInfo StereoCalibrationManger::leftCameraInfo(const std::string& frame_id,
diff --git a/multisense_ros/src/reconfigure.cpp b/multisense_ros/src/reconfigure.cpp
index 6841d0e..b56cb9d 100644
--- a/multisense_ros/src/reconfigure.cpp
+++ b/multisense_ros/src/reconfigure.cpp
@@ -87,11 +87,16 @@ Reconfigure::Reconfigure(Channel* driver,
         return;
     }
 
+    // Check which algorithms may be supported
     const bool ground_surface_supported =
         std::any_of(deviceModes.begin(), deviceModes.end(), [](const auto &mode) {
             return (mode.supportedDataSources & Source_Ground_Surface_Spline_Data) &&
                    (mode.supportedDataSources & Source_Ground_Surface_Class_Image); });
 
+    const bool apriltag_supported =
+        std::any_of(deviceModes.begin(), deviceModes.end(), [](const auto &mode) {
+            return mode.supportedDataSources & Source_AprilTag_Detections; });
+
     if (deviceInfo.lightingType != 0 ||
         system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == deviceInfo.hardwareRevision)
     {
         lighting_supported_ = true;
@@ -102,7 +107,10 @@
     }
 
     if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == deviceInfo.hardwareRevision ||
        system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == deviceInfo.hardwareRevision ||
-       system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == deviceInfo.hardwareRevision)
+       system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == deviceInfo.hardwareRevision ||
+       system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_VPB
== deviceInfo.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_STEREO == deviceInfo.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_MONOCAM == deviceInfo.hardwareRevision) { ptp_supported_ = true; } @@ -169,6 +177,12 @@ Reconfigure::Reconfigure(Channel* driver, new dynamic_reconfigure::Server(device_nh_)); server_s27_AR0234_ground_surface_->setCallback(std::bind(&Reconfigure::callback_s27_AR0234_ground_surface, this, std::placeholders::_1, std::placeholders::_2)); + } else if (apriltag_supported) { + server_s27_AR0234_apriltag_ = + std::shared_ptr< dynamic_reconfigure::Server > ( + new dynamic_reconfigure::Server(device_nh_)); + server_s27_AR0234_apriltag_->setCallback(std::bind(&Reconfigure::callback_s27_AR0234_apriltag, this, + std::placeholders::_1, std::placeholders::_2)); } else { server_s27_AR0234_ = std::shared_ptr< dynamic_reconfigure::Server > ( @@ -184,6 +198,12 @@ Reconfigure::Reconfigure(Channel* driver, new dynamic_reconfigure::Server(device_nh_)); server_ks21_sgm_AR0234_ground_surface_->setCallback(std::bind(&Reconfigure::callback_ks21_AR0234_ground_surface, this, std::placeholders::_1, std::placeholders::_2)); + } else if (apriltag_supported) { + server_ks21_sgm_AR0234_apriltag_ = + std::shared_ptr< dynamic_reconfigure::Server > ( + new dynamic_reconfigure::Server(device_nh_)); + server_ks21_sgm_AR0234_apriltag_->setCallback(std::bind(&Reconfigure::callback_ks21_AR0234_apriltag, this, + std::placeholders::_1, std::placeholders::_2)); } else { server_ks21_sgm_AR0234_ = std::shared_ptr< dynamic_reconfigure::Server > ( @@ -191,6 +211,32 @@ Reconfigure::Reconfigure(Channel* driver, server_ks21_sgm_AR0234_->setCallback(std::bind(&Reconfigure::callback_ks21_AR0234, this, std::placeholders::_1, std::placeholders::_2)); } + } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_VPB == deviceInfo.hardwareRevision) { + server_remote_head_vpb_ = + std::shared_ptr< dynamic_reconfigure::Server > ( + new dynamic_reconfigure::Server(device_nh_)); + server_remote_head_vpb_->setCallback(std::bind(&Reconfigure::callback_remote_head_vpb, this, + std::placeholders::_1, std::placeholders::_2)); + } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_STEREO == deviceInfo.hardwareRevision) { + if (ground_surface_supported) { + server_remote_head_sgm_AR0234_ground_surface_ = + std::shared_ptr< dynamic_reconfigure::Server > ( + new dynamic_reconfigure::Server(device_nh_)); + server_remote_head_sgm_AR0234_ground_surface_->setCallback(std::bind(&Reconfigure::callback_remote_head_sgm_AR0234_ground_surface, this, + std::placeholders::_1, std::placeholders::_2)); + } else { + server_remote_head_sgm_AR0234_ = + std::shared_ptr< dynamic_reconfigure::Server > ( + new dynamic_reconfigure::Server(device_nh_)); + server_remote_head_sgm_AR0234_->setCallback(std::bind(&Reconfigure::callback_remote_head_sgm_AR0234, this, + std::placeholders::_1, std::placeholders::_2)); + } + } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_MONOCAM == deviceInfo.hardwareRevision) { + server_remote_head_monocam_AR0234_ = + std::shared_ptr< dynamic_reconfigure::Server > ( + new dynamic_reconfigure::Server(device_nh_)); + server_remote_head_monocam_AR0234_->setCallback(std::bind(&Reconfigure::callback_remote_head_monocam_AR0234, this, + std::placeholders::_1, std::placeholders::_2)); } else if (versionInfo.sensorFirmwareVersion <= 0x0202) { switch(deviceInfo.imagerType) { @@ -280,6 +326,12 @@ Reconfigure::Reconfigure(Channel* 
driver, new dynamic_reconfigure::Server(device_nh_)); server_s27_AR0234_ground_surface_->setCallback(std::bind(&Reconfigure::callback_s27_AR0234_ground_surface, this, std::placeholders::_1, std::placeholders::_2)); + } else if (apriltag_supported) { + server_s27_AR0234_apriltag_ = + std::shared_ptr< dynamic_reconfigure::Server > ( + new dynamic_reconfigure::Server(device_nh_)); + server_s27_AR0234_apriltag_->setCallback(std::bind(&Reconfigure::callback_s27_AR0234_apriltag, this, + std::placeholders::_1, std::placeholders::_2)); } else { server_s27_AR0234_ = std::shared_ptr< dynamic_reconfigure::Server > ( @@ -736,27 +788,30 @@ template void Reconfigure::configurePtp(const T& dyn) } } -template void Reconfigure::configureStereoProfile(crl::multisense::image::Config &cfg, const T& dyn) +template void Reconfigure::configureStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn) { - crl::multisense::CameraProfile profile = crl::multisense::User_Control; - profile |= (dyn.detail_disparity_profile ? crl::multisense::Detail_Disparity : profile); profile |= (dyn.high_contrast_profile ? crl::multisense::High_Contrast : profile); profile |= (dyn.show_roi_profile ? crl::multisense::Show_ROIs : profile); - profile |= (dyn.full_res_aux_profile ? crl::multisense::Full_Res_Aux_Cam : profile); +} + +template void Reconfigure::configureGroundSurfaceStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn) +{ + profile |= (dyn.ground_surface_profile ? crl::multisense::Ground_Surface : profile); +} - cfg.setCameraProfile(profile); +template void Reconfigure::configureApriltagStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn) +{ + profile |= (dyn.apriltag_profile ? crl::multisense::AprilTag : profile); } -template void Reconfigure::configureStereoProfileWithGroundSurface(crl::multisense::image::Config &cfg, const T& dyn) +template void Reconfigure::configureFullResAuxStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn) { - crl::multisense::CameraProfile profile = crl::multisense::User_Control; - profile |= (dyn.detail_disparity_profile ? crl::multisense::Detail_Disparity : profile); - profile |= (dyn.high_contrast_profile ? crl::multisense::High_Contrast : profile); - profile |= (dyn.show_roi_profile ? crl::multisense::Show_ROIs : profile); profile |= (dyn.full_res_aux_profile ? crl::multisense::Full_Res_Aux_Cam : profile); - profile |= (dyn.ground_surface_profile ? crl::multisense::Ground_Surface : profile); +} - cfg.setCameraProfile(profile); +template void Reconfigure::configureDetailDisparityStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn) +{ + profile |= (dyn.detail_disparity_profile ? 
crl::multisense::Detail_Disparity : profile); } template void Reconfigure::configureExtrinsics(const T& dyn) @@ -789,7 +844,7 @@ template void Reconfigure::configureExtrinsics(const T& dyn) template void Reconfigure::configureGroundSurfaceParams(const T& dyn) { // - // Update calibration on camera via libmultisense + // Update ground surface params on camera via libmultisense crl::multisense::system::GroundSurfaceParams params; params.ground_surface_number_of_levels_x = dyn.ground_surface_spline_resolution_x; @@ -836,6 +891,28 @@ template void Reconfigure::configureGroundSurfaceParams(const T& dyn) ); } +template void Reconfigure::configureApriltagParams(const T& dyn) +{ + // + // Update apriltag params on camera via libmultisense + crl::multisense::system::ApriltagParams params; + + params.family = dyn.apriltag_family; + params.max_hamming = dyn.apriltag_max_hamming; + params.quad_detection_blur_sigma = dyn.apriltag_quad_detection_blur_sigma; + params.quad_detection_decimate = dyn.apriltag_quad_detection_decimate; + params.min_border_width = dyn.apriltag_min_border_width; + params.refine_quad_edges = dyn.apriltag_refine_quad_edges; + params.decode_sharpening = dyn.apriltag_decode_sharpening; + + Status status = driver_->setApriltagParams(params); + if (Status_Ok != status) { + ROS_ERROR("Reconfigure: failed to set apriltag params: %s", + Channel::statusString(status)); + return; + } +} + #define GET_CONFIG() \ image::Config cfg; \ Status status = driver_->getImageConfig(cfg); \ @@ -920,7 +997,11 @@ template void Reconfigure::configureGroundSurfaceParams(const T& dyn) #define S27_SGM() do { \ GET_CONFIG(); \ configureSgm(cfg, dyn); \ - configureStereoProfile(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureFullResAuxStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ configureAutoWhiteBalance(cfg, dyn); \ configureAuxCamera(cfg, dyn); \ configureCamera(cfg, dyn); \ @@ -930,22 +1011,34 @@ template void Reconfigure::configureGroundSurfaceParams(const T& dyn) configureExtrinsics(dyn); \ } while(0) -#define KS21_SGM() do { \ +#define S27_SGM_GROUND_SURFACE() do { \ GET_CONFIG(); \ configureSgm(cfg, dyn); \ - configureStereoProfile(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureGroundSurfaceStereoProfile(profile, dyn); \ + configureFullResAuxStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ + configureAutoWhiteBalance(cfg, dyn); \ + configureAuxCamera(cfg, dyn); \ configureCamera(cfg, dyn); \ configureBorderClip(dyn); \ - configureLeds(dyn); \ configurePtp(dyn); \ configurePointCloudRange(dyn); \ configureExtrinsics(dyn); \ + configureGroundSurfaceParams(dyn); \ } while(0) -#define S27_SGM_GROUND_SURFACE() do { \ +#define S27_SGM_APRILTAG() do { \ GET_CONFIG(); \ configureSgm(cfg, dyn); \ - configureStereoProfileWithGroundSurface(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureApriltagStereoProfile(profile, dyn); \ + configureFullResAuxStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ configureAutoWhiteBalance(cfg, dyn); \ configureAuxCamera(cfg, dyn); \ configureCamera(cfg, dyn); \ @@ -953,13 +1046,32 @@ 
template void Reconfigure::configureGroundSurfaceParams(const T& dyn) configurePtp(dyn); \ configurePointCloudRange(dyn); \ configureExtrinsics(dyn); \ - configureGroundSurfaceParams(dyn); \ + configureApriltagParams(dyn); \ + } while(0) + +#define KS21_SGM() do { \ + GET_CONFIG(); \ + configureSgm(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ + configureCamera(cfg, dyn); \ + configureBorderClip(dyn); \ + configureLeds(dyn); \ + configurePtp(dyn); \ + configurePointCloudRange(dyn); \ + configureExtrinsics(dyn); \ } while(0) #define KS21_SGM_GROUND_SURFACE() do { \ GET_CONFIG(); \ configureSgm(cfg, dyn); \ - configureStereoProfileWithGroundSurface(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureGroundSurfaceStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ configureCamera(cfg, dyn); \ configureBorderClip(dyn); \ configureLeds(dyn); \ @@ -969,23 +1081,112 @@ template void Reconfigure::configureGroundSurfaceParams(const T& dyn) configureGroundSurfaceParams(dyn); \ } while(0) +#define KS21_SGM_APRILTAG() do { \ + GET_CONFIG(); \ + configureSgm(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureApriltagStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ + configureCamera(cfg, dyn); \ + configureBorderClip(dyn); \ + configureLeds(dyn); \ + configurePtp(dyn); \ + configurePointCloudRange(dyn); \ + configureExtrinsics(dyn); \ + configureApriltagParams(dyn); \ + } while(0) + +#define REMOTE_HEAD_VPB() do { \ + GET_CONFIG(); \ + configurePtp(dyn); \ + configureExtrinsics(dyn); \ + } while(0) + +#define REMOTE_HEAD_SGM_AR0234() do { \ + GET_CONFIG(); \ + configureSgm(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ + configureCamera(cfg, dyn); \ + configureBorderClip(dyn); \ + configureLeds(dyn); \ + configurePtp(dyn); \ + configurePointCloudRange(dyn); \ + configureExtrinsics(dyn); \ + } while(0) + +#define REMOTE_HEAD_SGM_AR0234_GROUND_SURFACE() do { \ + GET_CONFIG(); \ + configureSgm(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureGroundSurfaceStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ + configureCamera(cfg, dyn); \ + configureBorderClip(dyn); \ + configureLeds(dyn); \ + configurePtp(dyn); \ + configurePointCloudRange(dyn); \ + configureExtrinsics(dyn); \ + configureGroundSurfaceParams(dyn); \ + } while(0) + +#define REMOTE_HEAD_SGM_AR0234_APRILTAG() do { \ + GET_CONFIG(); \ + configureSgm(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureApriltagStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ + configureCamera(cfg, dyn); \ + configureBorderClip(dyn); \ + configureLeds(dyn); \ + 
configurePtp(dyn); \ + configurePointCloudRange(dyn); \ + configureExtrinsics(dyn); \ + configureApriltagParams(dyn); \ + } while(0) + +#define REMOTE_HEAD_MONOCAM_AR0234() do { \ + GET_CONFIG(); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ + configureCamera(cfg, dyn); \ + configureLeds(dyn); \ + configurePtp(dyn); \ + configureExtrinsics(dyn); \ + } while(0) // // The dynamic reconfigure callbacks (MultiSense S* & feature variations) -void Reconfigure::callback_sl_bm_cmv2000 (multisense_ros::sl_bm_cmv2000Config& dyn, uint32_t level) { (void) level; SL_BM(); } -void Reconfigure::callback_sl_bm_cmv2000_imu (multisense_ros::sl_bm_cmv2000_imuConfig& dyn, uint32_t level) { (void) level; SL_BM_IMU(); } -void Reconfigure::callback_sl_bm_cmv4000 (multisense_ros::sl_bm_cmv4000Config& dyn, uint32_t level) { (void) level; SL_BM(); } -void Reconfigure::callback_sl_bm_cmv4000_imu (multisense_ros::sl_bm_cmv4000_imuConfig& dyn, uint32_t level) { (void) level; SL_BM_IMU(); } -void Reconfigure::callback_sl_sgm_cmv2000_imu(multisense_ros::sl_sgm_cmv2000_imuConfig& dyn, uint32_t level) { (void) level; SL_SGM_IMU(); } -void Reconfigure::callback_sl_sgm_cmv4000_imu(multisense_ros::sl_sgm_cmv4000_imuConfig& dyn, uint32_t level) { (void) level; SL_SGM_IMU_CMV4000(); } -void Reconfigure::callback_mono_cmv2000 (multisense_ros::mono_cmv2000Config& dyn, uint32_t level) { (void) level; MONO_BM_IMU(); } -void Reconfigure::callback_mono_cmv4000 (multisense_ros::mono_cmv4000Config& dyn, uint32_t level) { (void) level; MONO_BM_IMU(); } -void Reconfigure::callback_s27_AR0234 (multisense_ros::s27_sgm_AR0234Config& dyn, uint32_t level) { (void) level; S27_SGM(); } -void Reconfigure::callback_ks21_AR0234 (multisense_ros::ks21_sgm_AR0234Config& dyn, uint32_t level) { (void) level; KS21_SGM(); } - -void Reconfigure::callback_s27_AR0234_ground_surface (multisense_ros::s27_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level) { (void) level; S27_SGM_GROUND_SURFACE(); } -void Reconfigure::callback_ks21_AR0234_ground_surface (multisense_ros::ks21_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level) { (void) level; KS21_SGM_GROUND_SURFACE(); } +void Reconfigure::callback_sl_bm_cmv2000 (multisense_ros::sl_bm_cmv2000Config& dyn, uint32_t level) { (void) level; SL_BM(); } +void Reconfigure::callback_sl_bm_cmv2000_imu (multisense_ros::sl_bm_cmv2000_imuConfig& dyn, uint32_t level) { (void) level; SL_BM_IMU(); } +void Reconfigure::callback_sl_bm_cmv4000 (multisense_ros::sl_bm_cmv4000Config& dyn, uint32_t level) { (void) level; SL_BM(); } +void Reconfigure::callback_sl_bm_cmv4000_imu (multisense_ros::sl_bm_cmv4000_imuConfig& dyn, uint32_t level) { (void) level; SL_BM_IMU(); } +void Reconfigure::callback_sl_sgm_cmv2000_imu (multisense_ros::sl_sgm_cmv2000_imuConfig& dyn, uint32_t level) { (void) level; SL_SGM_IMU(); } +void Reconfigure::callback_sl_sgm_cmv4000_imu (multisense_ros::sl_sgm_cmv4000_imuConfig& dyn, uint32_t level) { (void) level; SL_SGM_IMU_CMV4000(); } +void Reconfigure::callback_mono_cmv2000 (multisense_ros::mono_cmv2000Config& dyn, uint32_t level) { (void) level; MONO_BM_IMU(); } +void Reconfigure::callback_mono_cmv4000 (multisense_ros::mono_cmv4000Config& dyn, uint32_t level) { (void) level; MONO_BM_IMU(); } + +void Reconfigure::callback_s27_AR0234 (multisense_ros::s27_sgm_AR0234Config& dyn, uint32_t level) { (void) level; S27_SGM(); } +void Reconfigure::callback_s27_AR0234_ground_surface 
(multisense_ros::s27_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level) { (void) level; S27_SGM_GROUND_SURFACE(); } +void Reconfigure::callback_s27_AR0234_apriltag (multisense_ros::s27_sgm_AR0234_apriltagConfig& dyn, uint32_t level) { (void) level; S27_SGM_APRILTAG(); } +void Reconfigure::callback_ks21_AR0234 (multisense_ros::ks21_sgm_AR0234Config& dyn, uint32_t level) { (void) level; KS21_SGM(); } +void Reconfigure::callback_ks21_AR0234_ground_surface(multisense_ros::ks21_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level) { (void) level; KS21_SGM_GROUND_SURFACE(); } +void Reconfigure::callback_ks21_AR0234_apriltag (multisense_ros::ks21_sgm_AR0234_apriltagConfig& dyn, uint32_t level) { (void) level; KS21_SGM_APRILTAG(); } +void Reconfigure::callback_remote_head_vpb (multisense_ros::remote_head_vpbConfig& dyn, uint32_t level) { (void) level; REMOTE_HEAD_VPB(); } +void Reconfigure::callback_remote_head_sgm_AR0234 (multisense_ros::remote_head_sgm_AR0234Config& dyn, uint32_t level) { (void) level; REMOTE_HEAD_SGM_AR0234(); } +void Reconfigure::callback_remote_head_sgm_AR0234_ground_surface(multisense_ros::remote_head_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level) { (void) level; REMOTE_HEAD_SGM_AR0234_GROUND_SURFACE(); } +void Reconfigure::callback_remote_head_sgm_AR0234_apriltag (multisense_ros::remote_head_sgm_AR0234_apriltagConfig& dyn, uint32_t level) { (void) level; REMOTE_HEAD_SGM_AR0234_APRILTAG(); } +void Reconfigure::callback_remote_head_monocam_AR0234 (multisense_ros::remote_head_monocam_AR0234Config& dyn, uint32_t level) { (void) level; REMOTE_HEAD_MONOCAM_AR0234(); } // // BCAM (Sony IMX104) diff --git a/multisense_ros/src/ros_driver.cpp b/multisense_ros/src/ros_driver.cpp index 98d46b7..2becb42 100644 --- a/multisense_ros/src/ros_driver.cpp +++ b/multisense_ros/src/ros_driver.cpp @@ -55,20 +55,41 @@ int main(int argc, char** argvPP) std::string sensor_ip; std::string tf_prefix; int sensor_mtu; + int head_id; nh_private_.param("sensor_ip", sensor_ip, "10.66.171.21"); nh_private_.param("tf_prefix", tf_prefix, "multisense"); nh_private_.param("sensor_mtu", sensor_mtu, 7200); + nh_private_.param("head_id", head_id, 0); Channel *d = NULL; try { - d = Channel::Create(sensor_ip); - if (NULL == d) { - ROS_ERROR("multisense_ros: failed to create communication channel to sensor @ \"%s\"", - sensor_ip.c_str()); + RemoteHeadChannel rh_channel = 0; + switch(head_id) { + case 0: + rh_channel = Remote_Head_0; + break; + case 1: + rh_channel = Remote_Head_1; + break; + case 2: + rh_channel = Remote_Head_2; + break; + case 3: + rh_channel = Remote_Head_3; + break; + default: + rh_channel = Remote_Head_VPB; + break; + } + + d = Channel::Create(sensor_ip, rh_channel); + if (NULL == d) { + ROS_ERROR("multisense_ros: failed to create communication channel to sensor @ \"%s\" with head ID %d", + sensor_ip.c_str(), rh_channel); return -2; }
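For reference, the reprojection implemented by StereoCalibrationManger::reproject reduces to the closed form below. This is a sketch assuming the standard ROS CameraInfo P-matrix layout: f_x = P[0], f_y = P[5], c_x = P[2], c_y = P[6] from the left rectified matrix, c'_x from the right rectified matrix, and t_x taken from the right P matrix; a disparity of zero is rejected before this point.

    \[
    t_x = \frac{P_{\text{right}}[0][3]}{P_{\text{right}}[0][0]}, \qquad
    W = (c_x - c'_x) - d, \qquad
    \begin{pmatrix} X \\ Y \\ Z \end{pmatrix}
    = \frac{t_x}{W}
    \begin{pmatrix}
    u - c_x \\
    \tfrac{f_x}{f_y}\,(v - c_y) \\
    f_x
    \end{pmatrix}
    \]

Multiplying numerator and denominator by f_y gives exactly the xB, yB, zB and invB terms used in the code, which is why invB must be the reciprocal of the full ((-f_y d) + f_y (c_x - c'_x)) expression.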
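Similarly, rectifiedAuxProject is a plain pinhole projection of that left-rectified point through the aux P matrix, whose last column carries the full (t_x, t_y, t_z) aux translation now returned by aux_T(). Again, this is a sketch under the ROS P-matrix convention:

    \[
    \begin{pmatrix} u' \\ v' \\ w \end{pmatrix}
    =
    \begin{pmatrix}
    f_x & 0 & c_x & f_x t_x \\
    0   & f_y & c_y & f_y t_y \\
    0   & 0   & 1   & t_z
    \end{pmatrix}
    \begin{pmatrix} X \\ Y \\ Z \\ 1 \end{pmatrix},
    \qquad
    u_{\text{aux}} = \frac{f_x X + c_x Z + f_x t_x}{Z + t_z}, \quad
    v_{\text{aux}} = \frac{f_y Y + c_y Z + f_y t_y}{Z + t_z}
    \]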
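The colorized point cloud path in Camera::pointCloudCallback chains those two operations: reproject each disparity sample into the left rectified frame, then project that point into the rectified aux image and sample a color there. Below is a minimal standalone sketch of that flow, not the driver's actual API; the helper names are hypothetical, and it uses nearest-neighbor sampling where the driver calls interpolate_color.

    // Sketch of the disparity -> 3D point -> aux-image color lookup.
    #include <cmath>
    #include <Eigen/Dense>
    #include <opencv2/core.hpp>

    // Reproject a left-rectified pixel (u, v) with disparity d into the left
    // rectified camera frame. fx, fy, cx, cy come from the left P matrix,
    // cx_right from the right P matrix, and tx = P_right[3] / P_right[0].
    Eigen::Vector3f reprojectDisparity(double u, double v, double d,
                                       double fx, double fy, double cx, double cy,
                                       double cx_right, double tx)
    {
        // d == 0 maps to infinity; callers should skip those samples.
        const double w = (cx - cx_right) - d;
        const double scale = tx / w;
        return Eigen::Vector3f(static_cast<float>(scale * (u - cx)),
                               static_cast<float>(scale * (fx / fy) * (v - cy)),
                               static_cast<float>(scale * fx));
    }

    // Project the 3D point through the 3x4 aux P matrix and sample the nearest
    // pixel of the rectified aux color image (BGR).
    cv::Vec3b lookupAuxColor(const Eigen::Vector3f &point,
                             const Eigen::Matrix<double, 3, 4> &aux_P,
                             const cv::Mat &rectified_aux_bgr)
    {
        const Eigen::Vector3d uvw =
            aux_P * Eigen::Vector4d(point.x(), point.y(), point.z(), 1.0);
        const int u = static_cast<int>(std::round(uvw.x() / uvw.z()));
        const int v = static_cast<int>(std::round(uvw.y() / uvw.z()));

        if (u < 0 || v < 0 || u >= rectified_aux_bgr.cols || v >= rectified_aux_bgr.rows)
        {
            return cv::Vec3b(0, 0, 0);
        }

        return rectified_aux_bgr.at<cv::Vec3b>(v, u);
    }

Going through the full aux P matrix, rather than the old horizontal shift of disparity * aux_T / T, keeps the color registration correct when the aux extrinsics include y or z translation components.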