Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(hesai): add filtered pointcloud counter function #247

Open
wants to merge 26 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
52eb0a2
add filterd pointcloud counter function
ike-kazu Dec 20, 2024
4379451
ci(pre-commit): autofix
pre-commit-ci[bot] Dec 20, 2024
1e366d6
del submodule
ike-kazu Dec 20, 2024
c20d6bf
fix type
ike-kazu Dec 20, 2024
5cd9da2
fix reviewed points
ike-kazu Dec 27, 2024
aaa3495
ci(pre-commit): autofix
pre-commit-ci[bot] Dec 27, 2024
ff8b7f5
fix reviewed points
ike-kazu Dec 27, 2024
2706c08
ci(pre-commit): autofix
pre-commit-ci[bot] Dec 27, 2024
d83d65c
fix typo
ike-kazu Dec 27, 2024
411a9ba
fix reviewed points
ike-kazu Jan 10, 2025
b5fddfb
del submodule
ike-kazu Jan 10, 2025
6a62c3d
ci(pre-commit): autofix
pre-commit-ci[bot] Jan 10, 2025
cf3ddb5
fix json struct
ike-kazu Jan 10, 2025
92db91d
ci(pre-commit): autofix
pre-commit-ci[bot] Jan 10, 2025
915dc32
fix output layout
ike-kazu Feb 28, 2025
4675321
del comment-out
ike-kazu Feb 28, 2025
c2ea817
change json name
ike-kazu Feb 28, 2025
87ff4eb
Merge branch 'main' into feat/filterd_points_counter_first_stage
mojomex Mar 5, 2025
6cc219e
ci(pre-commit): autofix
pre-commit-ci[bot] Mar 5, 2025
0fa5a9b
chore: remove wrongly committed submodules
mojomex Mar 5, 2025
f77a721
chore: clean up decode filter info, add integraion for mask filter
mojomex Mar 5, 2025
1fdf864
chore: revert wrongly committed Pandar40P range setting
mojomex Mar 5, 2025
d0e82a8
chore: fix mask filter integration
mojomex Mar 5, 2025
247e9ac
chore: rename to PointcloudDecodeStatistics
mojomex Mar 5, 2025
24bc2e9
chore: remove print and pass decode_statistics along with pointcloud …
mojomex Mar 5, 2025
b030c22
chore: fix wrong timestamp type in Hesai tests
mojomex Mar 5, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -24,13 +24,16 @@
#include <nebula_common/loggers/logger.hpp>
#include <nebula_common/nebula_common.hpp>
#include <nebula_common/point_types.hpp>
#include <nlohmann/json.hpp>
#include <rclcpp/logging.hpp>
#include <rclcpp/rclcpp.hpp>

#include <algorithm>
#include <array>
#include <cmath>
#include <cstdint>
#include <iostream>
#include <limits>
#include <memory>
#include <optional>
#include <tuple>
Expand All @@ -43,6 +46,104 @@
template <typename SensorT>
class HesaiDecoder : public HesaiScanDecoder
{
/// @brief Per-scan decode statistics: how many points each filter stage rejected,
/// how many points were kept, and the geometric/temporal bounds of the output cloud.
struct PointcloudDecodeStatistics
{
  /// @param has_downsample_mask_filter Whether the decoder runs a downsample mask filter.
  /// When false, the corresponding counter stays disengaged and is omitted from the JSON.
  explicit PointcloudDecodeStatistics(bool has_downsample_mask_filter)
  : downsample_mask_filtered_count(
      has_downsample_mask_filter ? std::make_optional<uint64_t>(0) : std::nullopt)
  {
  }

  uint64_t distance_filtered_count = 0;         // outside configured/sensor range limits
  uint64_t fov_filtered_count = 0;              // outside the configured field of view
  uint64_t invalid_point_count = 0;             // units reported with distance == 0
  uint64_t identical_filtered_count = 0;        // duplicates of multi-return points
  uint64_t multiple_return_filtered_count = 0;  // returns below the multi-return threshold
  // Engaged by the constructor only when the downsample mask filter is active;
  // no default initializer on purpose — the constructor is the single source of truth.
  std::optional<uint64_t> downsample_mask_filtered_count;

  uint64_t total_kept_point_count = 0;  // points that made it into the output cloud
  uint64_t invalid_packet_count = 0;    // packets that failed parsing

  // Bounds start at "empty interval" extremes so the first kept point initializes them.
  float cloud_distance_min_m = std::numeric_limits<float>::infinity();
  float cloud_distance_max_m = -std::numeric_limits<float>::infinity();
  float cloud_azimuth_min_deg = std::numeric_limits<float>::infinity();
  float cloud_azimuth_max_deg = -std::numeric_limits<float>::infinity();
  uint32_t packet_timestamp_min_ns = std::numeric_limits<uint32_t>::max();
  uint32_t packet_timestamp_max_ns = std::numeric_limits<uint32_t>::min();

  /// @brief Serialize all statistics to an ordered JSON object.
  /// @return JSON with keys `filter_pipeline`, `pointcloud_bounds`,
  /// `invalid_packet_count` and `total_kept_point_count`, in that order.
  [[nodiscard]] nlohmann::ordered_json to_json() const
  {
    // Each filter stage is reported as an object {"name": ..., "filtered_count": ...}.
    auto filter_entry = [](const char * name, uint64_t filtered_count) {
      nlohmann::ordered_json entry;
      entry["name"] = name;
      entry["filtered_count"] = filtered_count;
      return entry;
    };

    // Each bounds dimension is reported as an object {"min": ..., "max": ...}.
    auto bounds_entry = [](auto min_value, auto max_value) {
      nlohmann::ordered_json entry;
      entry["min"] = min_value;
      entry["max"] = max_value;
      return entry;
    };

    nlohmann::ordered_json j;
    // Stages are listed in the order they are applied during decoding.
    j["filter_pipeline"] = nlohmann::ordered_json::array({
      filter_entry("invalid", invalid_point_count),
      filter_entry("distance", distance_filtered_count),
      filter_entry("fov", fov_filtered_count),
      filter_entry("identical", identical_filtered_count),
      filter_entry("multiple", multiple_return_filtered_count),
    });

    if (downsample_mask_filtered_count.has_value()) {
      j["filter_pipeline"].push_back(
        filter_entry("downsample_mask", downsample_mask_filtered_count.value()));
    }

    j["pointcloud_bounds"] = {
      {"azimuth_deg", bounds_entry(cloud_azimuth_min_deg, cloud_azimuth_max_deg)},
      {"distance_m", bounds_entry(cloud_distance_min_m, cloud_distance_max_m)},
      {"timestamp_ns", bounds_entry(packet_timestamp_min_ns, packet_timestamp_max_ns)},
    };

    j["invalid_packet_count"] = invalid_packet_count;
    j["total_kept_point_count"] = total_kept_point_count;

    return j;
  }

  /// @brief Extend the azimuth/distance/timestamp bounds to include the given point.
  /// @param point A point that was kept in the output cloud
  void update_pointcloud_bounds(const NebulaPoint & point)
  {
    cloud_azimuth_min_deg = std::min(cloud_azimuth_min_deg, rad2deg(point.azimuth));
    cloud_azimuth_max_deg = std::max(cloud_azimuth_max_deg, rad2deg(point.azimuth));
    packet_timestamp_min_ns = std::min(packet_timestamp_min_ns, point.time_stamp);
    packet_timestamp_max_ns = std::max(packet_timestamp_max_ns, point.time_stamp);
    cloud_distance_min_m = std::min(cloud_distance_min_m, point.distance);
    cloud_distance_max_m = std::max(cloud_distance_max_m, point.distance);
  }
};

struct ScanCutAngles
{
float fov_min;
Expand Down Expand Up @@ -89,6 +190,8 @@

std::optional<point_filters::DownsampleMaskFilter> mask_filter_;

PointcloudDecodeStatistics pointcloud_decode_statistics_;

/// @brief Validates and parse PandarPacket. Currently only checks size, not checksums etc.
/// @param packet The incoming PandarPacket
/// @return Whether the packet was parsed successfully
Expand Down Expand Up @@ -135,6 +238,7 @@
auto & unit = *return_units[block_offset];

if (unit.distance == 0) {
pointcloud_decode_statistics_.invalid_point_count++;
continue;
}

Expand All @@ -144,6 +248,7 @@
distance < SensorT::min_range || SensorT::max_range < distance ||
distance < sensor_configuration_->min_range ||
sensor_configuration_->max_range < distance) {
pointcloud_decode_statistics_.distance_filtered_count++;
continue;
}

Expand All @@ -153,6 +258,7 @@

// Keep only last of multiple identical points
if (return_type == ReturnType::IDENTICAL && block_offset != n_blocks - 1) {
pointcloud_decode_statistics_.identical_filtered_count++;
continue;
}

Expand All @@ -174,6 +280,7 @@
}

if (is_below_multi_return_threshold) {
pointcloud_decode_statistics_.multiple_return_filtered_count++;
continue;
}
}
Expand All @@ -184,6 +291,7 @@

bool in_fov = angle_is_between(scan_cut_angles_.fov_min, scan_cut_angles_.fov_max, azimuth);
if (!in_fov) {
pointcloud_decode_statistics_.fov_filtered_count++;
continue;
}

Expand Down Expand Up @@ -221,9 +329,14 @@
point.azimuth = corrected_angle_data.azimuth_rad;
point.elevation = corrected_angle_data.elevation_rad;

if (!mask_filter_ || !mask_filter_->excluded(point)) {
pc->emplace_back(point);
if (mask_filter_ && mask_filter_->excluded(point)) {
(*pointcloud_decode_statistics_.downsample_mask_filtered_count)++;
continue;
}

pc->emplace_back(point);
pointcloud_decode_statistics_.update_pointcloud_bounds(point);
pointcloud_decode_statistics_.total_kept_point_count++;
}
}
}
Expand Down Expand Up @@ -265,10 +378,13 @@
scan_cut_angles_(
{deg2rad(sensor_configuration_->cloud_min_angle),
deg2rad(sensor_configuration_->cloud_max_angle), deg2rad(sensor_configuration_->cut_angle)}),
logger_(logger)
logger_(logger),
pointcloud_decode_statistics_(false)
{
decode_pc_ = std::make_shared<NebulaPointCloud>();
output_pc_ = std::make_shared<NebulaPointCloud>();
decode_pc_->reserve(SensorT::max_scan_buffer_points);
output_pc_->reserve(SensorT::max_scan_buffer_points);

if (sensor_configuration->downsample_mask_path) {
mask_filter_ = point_filters::DownsampleMaskFilter(
Expand All @@ -277,13 +393,13 @@
logger_->child("Downsample Mask"), true);
}

decode_pc_->reserve(SensorT::max_scan_buffer_points);
output_pc_->reserve(SensorT::max_scan_buffer_points);
pointcloud_decode_statistics_ = PointcloudDecodeStatistics(mask_filter_.has_value());
}

int unpack(const std::vector<uint8_t> & packet) override
{
if (!parse_packet(packet)) {
pointcloud_decode_statistics_.invalid_packet_count++;

Check warning on line 402 in nebula_decoders/include/nebula_decoders/nebula_decoders_hesai/decoders/hesai_decoder.hpp

View check run for this annotation

Codecov / codecov/patch

nebula_decoders/include/nebula_decoders/nebula_decoders_hesai/decoders/hesai_decoder.hpp#L402

Added line #L402 was not covered by tests
return -1;
}

Expand All @@ -295,6 +411,7 @@

if (has_scanned_) {
output_pc_->clear();
pointcloud_decode_statistics_ = PointcloudDecodeStatistics(mask_filter_.has_value());
has_scanned_ = false;
}

Expand Down Expand Up @@ -345,10 +462,10 @@

bool has_scanned() override { return has_scanned_; }

std::tuple<drivers::NebulaPointCloudPtr, double> get_pointcloud() override
std::tuple<drivers::NebulaPointCloudPtr, double, nlohmann::ordered_json> get_pointcloud() override
{
double scan_timestamp_s = static_cast<double>(output_scan_timestamp_ns_) * 1e-9;
return std::make_pair(output_pc_, scan_timestamp_s);
return {output_pc_, scan_timestamp_s, pointcloud_decode_statistics_.to_json()};
}
};

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

#include <nebula_common/hesai/hesai_common.hpp>
#include <nebula_common/point_types.hpp>
#include <nlohmann/json.hpp>

#include <tuple>
#include <vector>
Expand Down Expand Up @@ -46,7 +47,8 @@ class HesaiScanDecoder

/// @brief Returns the point cloud, timestamp and decode statistics of the last scan
/// @return A tuple of point cloud, timestamp in seconds, and decode statistics as JSON
virtual std::tuple<drivers::NebulaPointCloudPtr, double> get_pointcloud() = 0;
virtual std::tuple<drivers::NebulaPointCloudPtr, double, nlohmann::ordered_json>
get_pointcloud() = 0;
};
} // namespace nebula::drivers

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
#include "nebula_decoders/nebula_decoders_hesai/decoders/hesai_scan_decoder.hpp"

#include <nebula_common/loggers/logger.hpp>
#include <nlohmann/json.hpp>

#include <pcl_conversions/pcl_conversions.h>

Expand All @@ -33,6 +34,10 @@ namespace nebula::drivers
/// @brief Hesai driver
class HesaiDriver
{
public:
using parse_result_t =
std::optional<std::tuple<drivers::NebulaPointCloudPtr, double, nlohmann::ordered_json>>;

private:
/// @brief Current driver status
Status driver_status_;
Expand Down Expand Up @@ -71,8 +76,7 @@ class HesaiDriver
/// @brief Convert raw packet to pointcloud
/// @param packet Packet to convert
/// @return An optional tuple of pointcloud, timestamp in seconds and decode statistics
/// JSON, or std::nullopt if no scan was completed by this packet
std::tuple<drivers::NebulaPointCloudPtr, double> parse_cloud_packet(
const std::vector<uint8_t> & packet);
parse_result_t parse_cloud_packet(const std::vector<uint8_t> & packet);
};

} // namespace nebula::drivers
Expand Down
11 changes: 4 additions & 7 deletions nebula_decoders/src/nebula_decoders_hesai/hesai_driver.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -82,22 +82,19 @@ std::shared_ptr<HesaiScanDecoder> HesaiDriver::initialize_decoder(
logger_->child("Decoder"));
}

std::tuple<drivers::NebulaPointCloudPtr, double> HesaiDriver::parse_cloud_packet(
const std::vector<uint8_t> & packet)
HesaiDriver::parse_result_t HesaiDriver::parse_cloud_packet(const std::vector<uint8_t> & packet)
{
std::tuple<drivers::NebulaPointCloudPtr, double> pointcloud;

if (driver_status_ != nebula::Status::OK) {
logger_->error("Driver not OK.");
return pointcloud;
return {};
}

scan_decoder_->unpack(packet);
if (scan_decoder_->has_scanned()) {
pointcloud = scan_decoder_->get_pointcloud();
return scan_decoder_->get_pointcloud();
}

return pointcloud;
return {};
}

Status HesaiDriver::set_calibration_configuration(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -271,8 +271,8 @@ Status HesaiRosOfflineExtractBag::read_bag()
nebula_msg.header = extracted_msg.header;
for (auto & pkt : extracted_msg.packets) {
std::vector<uint8_t> pkt_data(pkt.data.begin(), std::next(pkt.data.begin(), pkt.size));
auto pointcloud_ts = driver_ptr_->parse_cloud_packet(pkt_data);
auto pointcloud = std::get<0>(pointcloud_ts);
auto parse_result = driver_ptr_->parse_cloud_packet(pkt_data);
const auto & [pointcloud, timestamp_s, decode_stats] = parse_result.value();

nebula_msgs::msg::NebulaPacket nebula_pkt;
nebula_pkt.stamp = pkt.stamp;
Expand Down
4 changes: 2 additions & 2 deletions nebula_examples/src/hesai/hesai_ros_offline_extract_pcd.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -208,9 +208,9 @@ Status HesaiRosOfflineExtractSample::read_bag()
<< bag_message->time_stamp << std::endl;

for (auto & pkt : extracted_msg.packets) {
auto pointcloud_ts = driver_ptr_->parse_cloud_packet(
auto parse_result = driver_ptr_->parse_cloud_packet(
std::vector<uint8_t>(pkt.data.begin(), std::next(pkt.data.begin(), pkt.size)));
auto pointcloud = std::get<0>(pointcloud_ts);
const auto & [pointcloud, timestamp_s, decode_stats] = parse_result.value();

if (!pointcloud) {
continue;
Expand Down
Loading
Loading