diff --git a/perception/radar_object_tracker/CMakeLists.txt b/perception/radar_object_tracker/CMakeLists.txt index 424e2b2d2c13e..3e3afacadd00f 100644 --- a/perception/radar_object_tracker/CMakeLists.txt +++ b/perception/radar_object_tracker/CMakeLists.txt @@ -27,6 +27,7 @@ ament_auto_add_library(radar_object_tracker_node SHARED src/radar_object_tracker_node/radar_object_tracker_node.cpp src/tracker/model/tracker_base.cpp src/tracker/model/linear_motion_tracker.cpp + src/tracker/model/constant_turn_rate_motion_tracker.cpp src/utils/utils.cpp src/data_association/data_association.cpp ) diff --git a/perception/radar_object_tracker/README.md b/perception/radar_object_tracker/README.md index 9f11d50ce2b3f..395bc025c2821 100644 --- a/perception/radar_object_tracker/README.md +++ b/perception/radar_object_tracker/README.md @@ -26,9 +26,10 @@ See more details in the [models.md](models.md). ### Input -| Name | Type | Description | -| --------- | ----------------------------------------------------- | ---------------- | -| `~/input` | `autoware_auto_perception_msgs::msg::DetectedObjects` | Detected objects | +| Name | Type | Description | +| ------------- | ----------------------------------------------------- | ---------------- | +| `~/input` | `autoware_auto_perception_msgs::msg::DetectedObjects` | Detected objects | +| `/vector/map` | `autoware_auto_mapping_msgs::msg::HADMapBin` | Map data | ### Output @@ -40,20 +41,38 @@ See more details in the [models.md](models.md). ### Node Parameters -| Name | Type | Default Value | Description | -| --------------------------- | ------ | ----------------------------- | ----------------------------------------------------------- | -| `publish_rate` | double | 30.0 | The rate at which to publish the output messages | -| `world_frame_id` | string | "world" | The frame ID of the world coordinate system | -| `enable_delay_compensation` | bool | false | Whether to enable delay compensation | -| `tracking_config_directory` | string | "" | The directory containing the tracking configuration files | -| `enable_logging` | bool | false | Whether to enable logging | -| `logging_file_path` | string | "~/.ros/association_log.json" | The path to the file where logs should be written | -| `can_assign_matrix` | array | | An array of integers used in the data association algorithm | -| `max_dist_matrix` | array | | An array of doubles used in the data association algorithm | -| `max_area_matrix` | array | | An array of doubles used in the data association algorithm | -| `min_area_matrix` | array | | An array of doubles used in the data association algorithm | -| `max_rad_matrix` | array | | An array of doubles used in the data association algorithm | -| `min_iou_matrix` | array | | An array of doubles used in the data association algorithm | +| Name | Type | Default Value | Description | +| ------------------------------------ | ------ | --------------------------- | --------------------------------------------------------------------------------------------------------------- | +| `publish_rate` | double | 10.0 | The rate at which to publish the output messages | +| `world_frame_id` | string | "map" | The frame ID of the world coordinate system | +| `enable_delay_compensation` | bool | false | Whether to enable delay compensation. If set to `true`, the output topic is published by a timer at `publish_rate`. 
| +| `tracking_config_directory` | string | "./config/tracking/" | The directory containing the tracking configuration files | +| `enable_logging` | bool | false | Whether to enable logging | +| `logging_file_path` | string | "/tmp/association_log.json" | The path to the file where logs should be written | +| `tracker_lifetime` | double | 1.0 | The lifetime of the tracker in seconds | +| `use_distance_based_noise_filtering` | bool | true | Whether to use distance based filtering | +| `minimum_range_threshold` | double | 70.0 | Minimum distance threshold for filtering in meters | +| `use_map_based_noise_filtering` | bool | true | Whether to use map based filtering | +| `max_distance_from_lane` | double | 5.0 | Maximum distance from lane for filtering in meters | +| `max_angle_diff_from_lane` | double | 0.785398 | Maximum angle difference from lane for filtering in radians | +| `max_lateral_velocity` | double | 5.0 | Maximum lateral velocity for filtering in m/s | +| `can_assign_matrix` | array | | An array of integers used in the data association algorithm | +| `max_dist_matrix` | array | | An array of doubles used in the data association algorithm | +| `max_area_matrix` | array | | An array of doubles used in the data association algorithm | +| `min_area_matrix` | array | | An array of doubles used in the data association algorithm | +| `max_rad_matrix` | array | | An array of doubles used in the data association algorithm | +| `min_iou_matrix` | array | | An array of doubles used in the data association algorithm | + +See more details in the [models.md](models.md). + +### Tracker parameters + +Currently, this package supports the following trackers: + +- `linear_motion_tracker` +- `constant_turn_rate_motion_tracker` + +Default settings for each tracker are defined in the [./config/tracking/](./config/tracking/), and described in [models.md](models.md). 
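As a rough illustration (editor's sketch, not part of this diff), the per-class `*_tracker` parameters above map object class labels to tracker implementations inside the node's `createNewTracker` function (added later in this diff). The free function `makeTracker` below is a hypothetical name used only for this sketch; the two tracker class names and YAML file names come from the diff itself.

```cpp
// Sketch of the per-label tracker selection, condensed from createNewTracker() in
// radar_object_tracker_node.cpp (this diff). makeTracker is a hypothetical helper.
#include <cstdint>
#include <memory>
#include <string>

#include <rclcpp/rclcpp.hpp>

#include <autoware_auto_perception_msgs/msg/detected_object.hpp>

#include "radar_object_tracker/tracker/tracker.hpp"

std::shared_ptr<Tracker> makeTracker(
  const std::string & tracker_name, const std::string & config_dir, const rclcpp::Time & time,
  const autoware_auto_perception_msgs::msg::DetectedObject & object, const std::uint8_t label)
{
  if (tracker_name == "linear_motion_tracker") {
    return std::make_shared<LinearMotionTracker>(
      time, object, config_dir + "linear_motion_tracker.yaml", label);
  }
  if (tracker_name == "constant_turn_rate_motion_tracker") {
    return std::make_shared<ConstantTurnRateMotionTracker>(
      time, object, config_dir + "constant_turn_rate_motion_tracker.yaml", label);
  }
  return nullptr;  // unknown tracker name; the real node emits a warning in this case
}
```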
## Assumptions / Known limits diff --git a/perception/radar_object_tracker/config/data_association_matrix.param.yaml b/perception/radar_object_tracker/config/data_association_matrix.param.yaml index 104d790d185db..69628414e67d4 100644 --- a/perception/radar_object_tracker/config/data_association_matrix.param.yaml +++ b/perception/radar_object_tracker/config/data_association_matrix.param.yaml @@ -14,10 +14,10 @@ max_dist_matrix: #UNKNOWN, CAR, TRUCK, BUS, TRAILER, MOTORBIKE, BICYCLE, PEDESTRIAN [4.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, #UNKNOWN - 4.0, 4.0, 5.0, 5.0, 5.0, 1.0, 1.0, 1.0, #CAR - 4.0, 2.0, 5.0, 5.0, 5.0, 1.0, 1.0, 1.0, #TRUCK - 4.0, 2.0, 5.0, 5.0, 5.0, 1.0, 1.0, 1.0, #BUS - 4.0, 2.0, 5.0, 5.0, 5.0, 1.0, 1.0, 1.0, #TRAILER + 4.0, 8.0, 8.0, 8.0, 8.0, 1.0, 1.0, 1.0, #CAR + 4.0, 6.0, 8.0, 8.0, 8.0, 1.0, 1.0, 1.0, #TRUCK + 4.0, 6.0, 8.0, 8.0, 8.0, 1.0, 1.0, 1.0, #BUS + 4.0, 6.0, 8.0, 8.0, 8.0, 1.0, 1.0, 1.0, #TRAILER 3.0, 1.0, 1.0, 1.0, 1.0, 3.0, 3.0, 2.0, #MOTORBIKE 3.0, 1.0, 1.0, 1.0, 1.0, 3.0, 3.0, 2.0, #BICYCLE 2.0, 1.0, 1.0, 1.0, 1.0, 3.0, 3.0, 2.0] #PEDESTRIAN diff --git a/perception/radar_object_tracker/config/default_tracker.param.yaml b/perception/radar_object_tracker/config/default_tracker.param.yaml index 757125202d69b..6c26034860e1b 100644 --- a/perception/radar_object_tracker/config/default_tracker.param.yaml +++ b/perception/radar_object_tracker/config/default_tracker.param.yaml @@ -1,9 +1,9 @@ /**: ros__parameters: - car_tracker: "linear_motion_tracker" - truck_tracker: "linear_motion_tracker" - bus_tracker: "linear_motion_tracker" - trailer_tracker: "linear_motion_tracker" - pedestrian_tracker: "linear_motion_tracker" - bicycle_tracker: "linear_motion_tracker" - motorcycle_tracker: "linear_motion_tracker" + car_tracker: "constant_turn_rate_motion_tracker" + truck_tracker: "constant_turn_rate_motion_tracker" + bus_tracker: "constant_turn_rate_motion_tracker" + trailer_tracker: "constant_turn_rate_motion_tracker" + pedestrian_tracker: "constant_turn_rate_motion_tracker" + bicycle_tracker: "constant_turn_rate_motion_tracker" + motorcycle_tracker: "constant_turn_rate_motion_tracker" diff --git a/perception/radar_object_tracker/config/radar_object_tracker.param.yaml b/perception/radar_object_tracker/config/radar_object_tracker.param.yaml new file mode 100644 index 0000000000000..d2c0841cf372e --- /dev/null +++ b/perception/radar_object_tracker/config/radar_object_tracker.param.yaml @@ -0,0 +1,26 @@ +# general parameters for radar_object_tracker node +/**: + ros__parameters: + # basic settings + world_frame_id: "map" + tracker_lifetime: 1.0 # [sec] + measurement_count_threshold: 3 # object will be published if it is tracked more than this threshold + + # delay compensate parameters + publish_rate: 10.0 + enable_delay_compensation: true + + # logging + enable_logging: false + logging_file_path: "/tmp/association_log.json" + + # filtering + ## 1. distance based filtering: remove closer objects than this threshold + use_distance_based_noise_filtering: true + minimum_range_threshold: 60.0 # [m] + + ## 2. 
lanelet map based filtering + use_map_based_noise_filtering: true + max_distance_from_lane: 5.0 # [m] + max_angle_diff_from_lane: 0.785398 # [rad] (45 deg) + max_lateral_velocity: 7.0 # [m/s] diff --git a/perception/radar_object_tracker/config/tracking/constant_turn_rate_motion_tracker.yaml b/perception/radar_object_tracker/config/tracking/constant_turn_rate_motion_tracker.yaml new file mode 100644 index 0000000000000..f80f881cabf03 --- /dev/null +++ b/perception/radar_object_tracker/config/tracking/constant_turn_rate_motion_tracker.yaml @@ -0,0 +1,36 @@ +default: + # This file defines the parameters for the constant turn rate motion tracker. + # All parameters in this file are defined in the vehicle coordinate system: + # the x axis points to the front of the vehicle and the y axis points to the left of the vehicle. + ekf_params: + # random walk noise is used to model the acceleration noise + process_noise_std: # [m/s^2] + x: 0.5 + y: 0.5 + yaw: 0.1 + vx: 1.0 # assume 1m/s velocity noise + wz: 0.4 + measurement_noise_std: + x: 4.0 # [m] + y: 4.0 # [m] + # y: 0.02 # rad/m if use_polar_coordinate_in_measurement_noise is true + yaw: 0.2 # [rad] + vx: 10 # [m/s] + initial_covariance_std: + x: 3.0 # [m] + y: 6.0 # [m] + yaw: 10.0 # [rad] + vx: 100.0 # [m/s] + wz: 10.0 # [rad/s] + # input flag + trust_yaw_input: false # set true if yaw input of sensor is reliable + trust_twist_input: false # set true if twist input of sensor is reliable + use_polar_coordinate_in_measurement_noise: false # set true if you want to define the measurement noise in polar coordinate + assume_zero_yaw_rate: false # set true if you want to assume zero yaw rate + # output limitation + limit: + max_speed: 80.0 # [m/s] + # low pass filter is used to smooth the yaw and shape estimation + low_pass_filter: + time_constant: 1.0 # [s] + sampling_time: 0.1 # [s] diff --git a/perception/radar_object_tracker/config/tracking/linear_motion_tracker.yaml b/perception/radar_object_tracker/config/tracking/linear_motion_tracker.yaml index 71367f4575193..5e813558a2bff 100644 --- a/perception/radar_object_tracker/config/tracking/linear_motion_tracker.yaml +++ b/perception/radar_object_tracker/config/tracking/linear_motion_tracker.yaml @@ -7,22 +7,28 @@ default: process_noise_std: # [m/s^2] ax: 0.98 # assume 0.1G acceleration noise ay: 0.98 - vx: 0.1 # assume 0.1m/s velocity noise - vy: 0.1 + vx: 1.0 # assume 1m/s velocity noise + vy: 1.0 x: 1.0 # assume 1m position noise y: 1.0 measurement_noise_std: x: 0.6 # [m] - y: 0.9 # [m] - vx: 0.4 # [m/s] - vy: 1 # [m/s] + # y: 4.0 # [m] + y: 0.01 # rad/m if use_polar_coordinate_in_measurement_noise is true + vx: 10 # [m/s] + vy: 100 # [m/s] initial_covariance_std: x: 3.0 # [m] y: 6.0 # [m] - vx: 1.0 # [m/s] - vy: 5.0 # [m/s] - ax: 0.5 # [m/s^2] - ay: 1.0 # [m/s^2] + vx: 1000.0 # [m/s] + vy: 1000.0 # [m/s] + ax: 1000.0 # [m/s^2] + ay: 1000.0 # [m/s^2] + estimate_acc: false # set true if you want to estimate the acceleration + # input flag + trust_yaw_input: false # set true if yaw input of sensor is reliable + trust_twist_input: false # set true if twist input of sensor is reliable + use_polar_coordinate_in_measurement_noise: true # set true if you want to define the measurement noise in polar coordinate # output limitation limit: max_speed: 80.0 # [m/s] diff --git a/perception/radar_object_tracker/image/noise_filtering.drawio.svg b/perception/radar_object_tracker/image/noise_filtering.drawio.svg new file mode 100644 index 0000000000000..cd044cdfa1379 --- /dev/null +++ 
b/perception/radar_object_tracker/image/noise_filtering.drawio.svg @@ -0,0 +1,165 @@
+[drawio SVG markup not reproduced here. Figure labels: "self vehicle", "distance threshold", "removed" (objects inside the distance threshold), "lane distance threshold", "removed with angle threshold", "removed by high lateral velocity", and "filtered object" for the objects that are kept.]
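As an editor's sketch (not part of this diff), the two removal criteria pictured in the figure correspond to the minimum-range check in `distanceBasedNoiseFilter()` and the lane-lateral velocity check in `hasValidVelocityDirectionToLanelet()` added later in this diff. The free-function names below are hypothetical and only illustrate the predicates.

```cpp
// Sketch of the noise-removal predicates; assumes map-frame poses and yaws in radians.
#include <cmath>

// True if the object is closer to the ego vehicle than minimum_range_threshold [m].
bool isTooCloseToEgo(
  double obj_x, double obj_y, double ego_x, double ego_y, double minimum_range_threshold)
{
  return std::hypot(obj_x - ego_x, obj_y - ego_y) < minimum_range_threshold;
}

// True if the velocity component perpendicular to the lane direction exceeds
// max_lateral_velocity [m/s]. The sine is unaffected by angle normalization.
bool exceedsLateralVelocity(
  double object_speed, double object_vel_yaw_global, double lane_yaw, double max_lateral_velocity)
{
  const double delta_yaw = object_vel_yaw_global - lane_yaw;
  const double lane_lateral_vel = object_speed * std::sin(delta_yaw);
  return std::fabs(lane_lateral_vel) > max_lateral_velocity;
}
```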
diff --git a/perception/radar_object_tracker/include/radar_object_tracker/radar_object_tracker_node/radar_object_tracker_node.hpp b/perception/radar_object_tracker/include/radar_object_tracker/radar_object_tracker_node/radar_object_tracker_node.hpp index 898a7855aabae..9a3fdf602a3ca 100644 --- a/perception/radar_object_tracker/include/radar_object_tracker/radar_object_tracker_node/radar_object_tracker_node.hpp +++ b/perception/radar_object_tracker/include/radar_object_tracker/radar_object_tracker_node/radar_object_tracker_node.hpp @@ -33,6 +33,16 @@ #include #endif +#include +#include +#include + +#include +#include +#include +#include +#include +#include #include #include @@ -41,8 +51,11 @@ #include #include +using autoware_auto_mapping_msgs::msg::HADMapBin; using autoware_auto_perception_msgs::msg::DetectedObject; using autoware_auto_perception_msgs::msg::DetectedObjects; +using autoware_auto_perception_msgs::msg::TrackedObject; +using autoware_auto_perception_msgs::msg::TrackedObjects; class RadarObjectTrackerNode : public rclcpp::Node { @@ -50,20 +63,25 @@ class RadarObjectTrackerNode : public rclcpp::Node explicit RadarObjectTrackerNode(const rclcpp::NodeOptions & node_options); private: + // pub-sub rclcpp::Publisher::SharedPtr tracked_objects_pub_; rclcpp::Subscription::SharedPtr detected_object_sub_; - rclcpp::TimerBase::SharedPtr publish_timer_; // publish timer + rclcpp::TimerBase::SharedPtr publish_timer_; // publish timer + rclcpp::Subscription::SharedPtr sub_map_; // map subscriber tf2_ros::Buffer tf_buffer_; tf2_ros::TransformListener tf_listener_; - + float tracker_lifetime_; std::map tracker_map_; + int measurement_count_threshold_; + void onMeasurement( const autoware_auto_perception_msgs::msg::DetectedObjects::ConstSharedPtr input_objects_msg); void onTimer(); + void onMap(const HADMapBin::ConstSharedPtr map_msg); std::string world_frame_id_; // tracking frame std::string tracker_config_directory_; @@ -77,11 +95,36 @@ class RadarObjectTrackerNode : public rclcpp::Node std::string path; } logging_; + // noise reduction + bool use_distance_based_noise_filtering_; + bool use_map_based_noise_filtering_; + + // distance based noise reduction + double minimum_range_threshold_; + std::string sensor_frame_ = "base_link"; + + // map based noise reduction + bool map_is_loaded_ = false; + double max_distance_from_lane_; + double max_lateral_velocity_; + double max_angle_diff_from_lane_; + // Lanelet Map Pointers + std::shared_ptr lanelet_map_ptr_; + std::shared_ptr routing_graph_ptr_; + std::shared_ptr traffic_rules_ptr_; + // Crosswalk Entry Points + // lanelet::ConstLanelets crosswalks_; + void checkTrackerLifeCycle( std::list> & list_tracker, const rclcpp::Time & time, const geometry_msgs::msg::Transform & self_transform); void sanitizeTracker( std::list> & list_tracker, const rclcpp::Time & time); + void mapBasedNoiseFilter( + std::list> & list_tracker, const rclcpp::Time & time); + void distanceBasedNoiseFilter( + std::list> & list_tracker, const rclcpp::Time & time, + const geometry_msgs::msg::Transform & self_transform); std::shared_ptr createNewTracker( const autoware_auto_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time, const geometry_msgs::msg::Transform & self_transform) const; diff --git a/perception/radar_object_tracker/include/radar_object_tracker/tracker/model/constant_turn_rate_motion_tracker.hpp b/perception/radar_object_tracker/include/radar_object_tracker/tracker/model/constant_turn_rate_motion_tracker.hpp new file mode 100644 index 
0000000000000..ac4b2cd1a0acb --- /dev/null +++ b/perception/radar_object_tracker/include/radar_object_tracker/tracker/model/constant_turn_rate_motion_tracker.hpp @@ -0,0 +1,112 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef RADAR_OBJECT_TRACKER__TRACKER__MODEL__CONSTANT_TURN_RATE_MOTION_TRACKER_HPP_ +#define RADAR_OBJECT_TRACKER__TRACKER__MODEL__CONSTANT_TURN_RATE_MOTION_TRACKER_HPP_ + +#include "radar_object_tracker/tracker/model/tracker_base.hpp" + +#include + +#include + +using Label = autoware_auto_perception_msgs::msg::ObjectClassification; +class ConstantTurnRateMotionTracker : public Tracker // means constant turn rate motion tracker +{ +private: + autoware_auto_perception_msgs::msg::DetectedObject object_; + rclcpp::Logger logger_; + +private: + KalmanFilter ekf_; + rclcpp::Time last_update_time_; + enum IDX { X = 0, Y = 1, YAW = 2, VX = 3, WZ = 4 }; + + struct EkfParams + { + // dimension + char dim_x = 5; + // system noise + double q_cov_x; + double q_cov_y; + double q_cov_yaw; + double q_cov_vx; + double q_cov_wz; + // measurement noise + double r_cov_x; + double r_cov_y; + double r_cov_yaw; + double r_cov_vx; + // initial state covariance + double p0_cov_x; + double p0_cov_y; + double p0_cov_yaw; + double p0_cov_vx; + double p0_cov_wz; + }; + static EkfParams ekf_params_; + + // limitation + static double max_vx_; + // rough tracking parameters + float z_; + + // lpf parameter + static double filter_tau_; // time constant of 1st order low pass filter + static double filter_dt_; // sampling time of 1st order low pass filter + + // static flags + static bool is_initialized_; + static bool trust_yaw_input_; + static bool trust_twist_input_; + static bool use_polar_coordinate_in_measurement_noise_; + static bool assume_zero_yaw_rate_; + +private: + struct BoundingBox + { + double length; + double width; + double height; + }; + struct Cylinder + { + double width; + double height; + }; + BoundingBox bounding_box_; + Cylinder cylinder_; + +public: + ConstantTurnRateMotionTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object, + const std::string & tracker_param_file, const std::uint8_t & label); + + static void loadDefaultModelParameters(const std::string & path); + bool predict(const rclcpp::Time & time) override; + bool predict(const double dt, KalmanFilter & ekf) const; + bool measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time, + const geometry_msgs::msg::Transform & self_transform) override; + bool measureWithPose( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const geometry_msgs::msg::Transform & self_transform); + bool measureWithShape(const autoware_auto_perception_msgs::msg::DetectedObject & object); + bool getTrackedObject( + const rclcpp::Time & time, + autoware_auto_perception_msgs::msg::TrackedObject & object) const override; + virtual ~ConstantTurnRateMotionTracker() {} +}; + +#endif // 
RADAR_OBJECT_TRACKER__TRACKER__MODEL__CONSTANT_TURN_RATE_MOTION_TRACKER_HPP_ diff --git a/perception/radar_object_tracker/include/radar_object_tracker/tracker/model/linear_motion_tracker.hpp b/perception/radar_object_tracker/include/radar_object_tracker/tracker/model/linear_motion_tracker.hpp index 108e79c043ba0..77caa7a266481 100644 --- a/perception/radar_object_tracker/include/radar_object_tracker/tracker/model/linear_motion_tracker.hpp +++ b/perception/radar_object_tracker/include/radar_object_tracker/tracker/model/linear_motion_tracker.hpp @@ -56,18 +56,25 @@ class LinearMotionTracker : public Tracker double p0_cov_vy; double p0_cov_ax; double p0_cov_ay; - } ekf_params_; + }; + static EkfParams ekf_params_; // limitation - double max_vx_; - double max_vy_; + static double max_vx_; + static double max_vy_; // rough tracking parameters float z_; float yaw_; // lpf parameter - double filter_tau_; // time constant of 1st order low pass filter - double filter_dt_; // sampling time of 1st order low pass filter + static double filter_tau_; // time constant of 1st order low pass filter + static double filter_dt_; // sampling time of 1st order low pass filter + + static bool is_initialized_; + static bool estimate_acc_; + static bool trust_yaw_input_; + static bool trust_twist_input_; + static bool use_polar_coordinate_in_measurement_noise_; private: struct BoundingBox @@ -89,13 +96,15 @@ class LinearMotionTracker : public Tracker const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object, const std::string & tracker_param_file, const std::uint8_t & label); - void loadDefaultModelParameters(const std::string & path); + static void loadDefaultModelParameters(const std::string & path); bool predict(const rclcpp::Time & time) override; bool predict(const double dt, KalmanFilter & ekf) const; bool measure( const autoware_auto_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time, const geometry_msgs::msg::Transform & self_transform) override; - bool measureWithPose(const autoware_auto_perception_msgs::msg::DetectedObject & object); + bool measureWithPose( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const geometry_msgs::msg::Transform & self_transform); bool measureWithShape(const autoware_auto_perception_msgs::msg::DetectedObject & object); bool getTrackedObject( const rclcpp::Time & time, diff --git a/perception/radar_object_tracker/include/radar_object_tracker/tracker/tracker.hpp b/perception/radar_object_tracker/include/radar_object_tracker/tracker/tracker.hpp index 7da940912a253..70045e6b16a07 100644 --- a/perception/radar_object_tracker/include/radar_object_tracker/tracker/tracker.hpp +++ b/perception/radar_object_tracker/include/radar_object_tracker/tracker/tracker.hpp @@ -15,6 +15,7 @@ #ifndef RADAR_OBJECT_TRACKER__TRACKER__TRACKER_HPP_ #define RADAR_OBJECT_TRACKER__TRACKER__TRACKER_HPP_ +#include "model/constant_turn_rate_motion_tracker.hpp" #include "model/linear_motion_tracker.hpp" #include "model/tracker_base.hpp" diff --git a/perception/radar_object_tracker/launch/radar_object_tracker.launch.xml b/perception/radar_object_tracker/launch/radar_object_tracker.launch.xml index 7ef3e58a64161..6e6813d3e40ff 100644 --- a/perception/radar_object_tracker/launch/radar_object_tracker.launch.xml +++ b/perception/radar_object_tracker/launch/radar_object_tracker.launch.xml @@ -4,19 +4,23 @@ - + + + + + + + + + - - - - diff --git a/perception/radar_object_tracker/models.md b/perception/radar_object_tracker/models.md index 
a4c53db156f0f..f4e4c6a02edaf 100644 --- a/perception/radar_object_tracker/models.md +++ b/perception/radar_object_tracker/models.md @@ -74,3 +74,33 @@ v_{k+1} &= v_k \\ \omega_{k+1} &= \omega_k \end{align} $$ + +## Noise filtering + +Radar sensors often produce noisy measurements, so we use the following filters to reduce false positive objects. + +The figure below shows the current noise filtering process. + +![noise_filter](image/noise_filtering.drawio.svg) + +### minimum range filter + +In most cases, radar sensors are used together with other sensors such as LiDAR and camera, and are mainly responsible for detecting objects far away. So we can filter out objects that are too close to the sensor. + +The `use_distance_based_noise_filtering` parameter enables/disables this filter, and the `minimum_range_threshold` parameter sets the threshold. + +### lanelet based filter + +With lanelet map information, we can filter out false positive objects that are unlikely to be important obstacles. + +We filter out objects that satisfy any of the following conditions: + +- too large lateral distance from lane +- velocity direction is too different from lane direction +- too large lateral velocity + +Each condition can be adjusted by the following parameters: + +- `max_distance_from_lane` +- `max_angle_diff_from_lane` +- `max_lateral_velocity` diff --git a/perception/radar_object_tracker/package.xml b/perception/radar_object_tracker/package.xml index 46fb0826e30e1..6975979f4e32d 100644 --- a/perception/radar_object_tracker/package.xml +++ b/perception/radar_object_tracker/package.xml @@ -16,6 +16,7 @@ autoware_auto_perception_msgs eigen kalman_filter + lanelet2_extension mussp object_recognition_utils rclcpp diff --git a/perception/radar_object_tracker/src/radar_object_tracker_node/radar_object_tracker_node.cpp b/perception/radar_object_tracker/src/radar_object_tracker_node/radar_object_tracker_node.cpp index f32084493f34e..6d801302ab6c5 100644 --- a/perception/radar_object_tracker/src/radar_object_tracker_node/radar_object_tracker_node.cpp +++ b/perception/radar_object_tracker/src/radar_object_tracker_node/radar_object_tracker_node.cpp @@ -60,6 +60,131 @@ boost::optional<geometry_msgs::msg::Transform> getTransformAnonymous( } } +// check if lanelet is close enough to the target lanelet +bool isDuplicated( + const std::pair<double, lanelet::Lanelet> & target_lanelet, + const lanelet::ConstLanelets & lanelets) +{ + const double CLOSE_LANELET_THRESHOLD = 0.1; + for (const auto & lanelet : lanelets) { + const auto target_lanelet_end_p = target_lanelet.second.centerline2d().back(); + const auto lanelet_end_p = lanelet.centerline2d().back(); + const double dist = std::hypot( + target_lanelet_end_p.x() - lanelet_end_p.x(), target_lanelet_end_p.y() - lanelet_end_p.y()); + if (dist < CLOSE_LANELET_THRESHOLD) { + return true; + } + } + + return false; +} + +// check if the lanelet is valid for object tracking +bool checkCloseLaneletCondition( + const std::pair<double, lanelet::Lanelet> & lanelet, const TrackedObject & object, + const double max_distance_from_lane, const double max_angle_diff_from_lane) +{ + // Step1. If we only have one point in the centerline, we will ignore the lanelet + if (lanelet.second.centerline().size() <= 1) { + return false; + } + + // Step2. 
Check if the obstacle is inside or close enough to the lanelet + lanelet::BasicPoint2d search_point( + object.kinematics.pose_with_covariance.pose.position.x, + object.kinematics.pose_with_covariance.pose.position.y); + if (!lanelet::geometry::inside(lanelet.second, search_point)) { + const auto distance = lanelet.first; + if (distance > max_distance_from_lane) { + return false; + } + } + + // Step3. Calculate the angle difference between the lane angle and obstacle angle + const double object_yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + const double lane_yaw = lanelet::utils::getLaneletAngle( + lanelet.second, object.kinematics.pose_with_covariance.pose.position); + const double delta_yaw = object_yaw - lane_yaw; + const double normalized_delta_yaw = tier4_autoware_utils::normalizeRadian(delta_yaw); + const double abs_norm_delta = std::fabs(normalized_delta_yaw); + + // Step4. Check if the closest lanelet is valid, and add all + // of the lanelets that are below max_dist and max_delta_yaw + const double object_vel = object.kinematics.twist_with_covariance.twist.linear.x; + const bool is_yaw_reversed = M_PI - max_angle_diff_from_lane < abs_norm_delta && object_vel < 0.0; + if (is_yaw_reversed || abs_norm_delta < max_angle_diff_from_lane) { + return true; + } + + return false; +} + +lanelet::ConstLanelets getClosestValidLanelets( + const TrackedObject & object, const lanelet::LaneletMapPtr & lanelet_map_ptr, + const double max_distance_from_lane, const double max_angle_diff_from_lane) +{ + // obstacle point + lanelet::BasicPoint2d search_point( + object.kinematics.pose_with_covariance.pose.position.x, + object.kinematics.pose_with_covariance.pose.position.y); + + // nearest lanelet + std::vector> surrounding_lanelets = + lanelet::geometry::findNearest(lanelet_map_ptr->laneletLayer, search_point, 10); + + // No Closest Lanelets + if (surrounding_lanelets.empty()) { + return {}; + } + + lanelet::ConstLanelets closest_lanelets; + for (const auto & lanelet : surrounding_lanelets) { + // Check if the close lanelets meet the necessary condition for start lanelets and + // Check if similar lanelet is inside the closest lanelet + if ( + !checkCloseLaneletCondition( + lanelet, object, max_distance_from_lane, max_angle_diff_from_lane) || + isDuplicated(lanelet, closest_lanelets)) { + continue; + } + + closest_lanelets.push_back(lanelet.second); + } + + return closest_lanelets; +} + +bool hasValidVelocityDirectionToLanelet( + const TrackedObject & object, const lanelet::ConstLanelets & lanelets, + const double max_lateral_velocity) +{ + // get object velocity direction + const double object_yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + const double object_vel_x = object.kinematics.twist_with_covariance.twist.linear.x; + const double object_vel_y = object.kinematics.twist_with_covariance.twist.linear.y; + const double object_vel_yaw = std::atan2(object_vel_y, object_vel_x); // local frame + const double object_vel_yaw_global = + tier4_autoware_utils::normalizeRadian(object_yaw + object_vel_yaw); + const double object_vel = std::hypot(object_vel_x, object_vel_y); + + for (const auto & lanelet : lanelets) { + // get lanelet angle + const double lane_yaw = lanelet::utils::getLaneletAngle( + lanelet, object.kinematics.pose_with_covariance.pose.position); + const double delta_yaw = object_vel_yaw_global - lane_yaw; + const double normalized_delta_yaw = tier4_autoware_utils::normalizeRadian(delta_yaw); + + // get lateral velocity + const double 
lane_vel = object_vel * std::sin(normalized_delta_yaw); + // std::cout << "lane_vel: " << lane_vel << " , delta yaw:" << normalized_delta_yaw << " , + // object_vel " << object_vel <("output", rclcpp::QoS{1}); + sub_map_ = this->create_subscription( + "/vector_map", rclcpp::QoS{1}.transient_local(), + std::bind(&RadarObjectTrackerNode::onMap, this, std::placeholders::_1)); // Parameters - double publish_rate = declare_parameter("publish_rate", 30.0); - world_frame_id_ = declare_parameter("world_frame_id", "world"); - bool enable_delay_compensation{declare_parameter("enable_delay_compensation", false)}; - tracker_config_directory_ = declare_parameter("tracking_config_directory", ""); - logging_.enable = declare_parameter("enable_logging", false); - logging_.path = - declare_parameter("logging_file_path", "~/.ros/association_log.json"); + tracker_lifetime_ = declare_parameter("tracker_lifetime"); + double publish_rate = declare_parameter("publish_rate"); + measurement_count_threshold_ = declare_parameter("measurement_count_threshold"); + world_frame_id_ = declare_parameter("world_frame_id"); + bool enable_delay_compensation{declare_parameter("enable_delay_compensation")}; + tracker_config_directory_ = declare_parameter("tracking_config_directory"); + logging_.enable = declare_parameter("enable_logging"); + logging_.path = declare_parameter("logging_file_path"); + + // noise filter + use_distance_based_noise_filtering_ = + declare_parameter("use_distance_based_noise_filtering"); + use_map_based_noise_filtering_ = declare_parameter("use_map_based_noise_filtering"); + minimum_range_threshold_ = declare_parameter("minimum_range_threshold"); + max_distance_from_lane_ = declare_parameter("max_distance_from_lane"); + max_angle_diff_from_lane_ = declare_parameter("max_angle_diff_from_lane"); + max_lateral_velocity_ = declare_parameter("max_lateral_velocity"); // Load tracking config file if (tracker_config_directory_.empty()) { @@ -132,6 +270,17 @@ RadarObjectTrackerNode::RadarObjectTrackerNode(const rclcpp::NodeOptions & node_ std::make_pair(Label::MOTORCYCLE, this->declare_parameter("motorcycle_tracker"))); } +// load map information to node parameter +void RadarObjectTrackerNode::onMap(const HADMapBin::ConstSharedPtr msg) +{ + RCLCPP_INFO(get_logger(), "[Radar Object Tracker]: Start loading lanelet"); + lanelet_map_ptr_ = std::make_shared(); + lanelet::utils::conversion::fromBinMsg( + *msg, lanelet_map_ptr_, &traffic_rules_ptr_, &routing_graph_ptr_); + RCLCPP_INFO(get_logger(), "[Radar Object Tracker]: Map is loaded"); + map_is_loaded_ = true; +} + void RadarObjectTrackerNode::onMeasurement( const autoware_auto_perception_msgs::msg::DetectedObjects::ConstSharedPtr input_objects_msg) { @@ -176,6 +325,14 @@ void RadarObjectTrackerNode::onMeasurement( /* life cycle check */ checkTrackerLifeCycle(list_tracker_, measurement_time, *self_transform); + /* map based noise filter */ + if (map_is_loaded_ && use_map_based_noise_filtering_) { + mapBasedNoiseFilter(list_tracker_, measurement_time); + } + /* distance based noise filter */ + if (use_distance_based_noise_filtering_) { + distanceBasedNoiseFilter(list_tracker_, measurement_time, *self_transform); + } /* sanitize trackers */ sanitizeTracker(list_tracker_, measurement_time); @@ -207,6 +364,10 @@ std::shared_ptr RadarObjectTrackerNode::createNewTracker( if (tracker == "linear_motion_tracker") { std::string config_file = tracker_config_directory_ + "linear_motion_tracker.yaml"; return std::make_shared(time, object, config_file, label); + } else if 
(tracker == "constant_turn_rate_motion_tracker") { + std::string config_file = + tracker_config_directory_ + "constant_turn_rate_motion_tracker.yaml"; + return std::make_shared(time, object, config_file, label); } else { // not implemented yet so put warning RCLCPP_WARN(get_logger(), "Tracker %s is not implemented yet", tracker.c_str()); @@ -227,6 +388,16 @@ void RadarObjectTrackerNode::onTimer() /* life cycle check */ checkTrackerLifeCycle(list_tracker_, current_time, *self_transform); + + /* map based noise filter */ + if (map_is_loaded_ && use_map_based_noise_filtering_) { + mapBasedNoiseFilter(list_tracker_, current_time); + } + /* distance based noise filter */ + if (use_distance_based_noise_filtering_) { + distanceBasedNoiseFilter(list_tracker_, current_time, *self_transform); + } + /* sanitize trackers */ sanitizeTracker(list_tracker_, current_time); @@ -238,12 +409,9 @@ void RadarObjectTrackerNode::checkTrackerLifeCycle( std::list> & list_tracker, const rclcpp::Time & time, [[maybe_unused]] const geometry_msgs::msg::Transform & self_transform) { - /* params */ - constexpr float max_elapsed_time = 1.0; - /* delete tracker */ for (auto itr = list_tracker.begin(); itr != list_tracker.end(); ++itr) { - const bool is_old = max_elapsed_time < (*itr)->getElapsedTimeFromLastUpdate(time); + const bool is_old = tracker_lifetime_ < (*itr)->getElapsedTimeFromLastUpdate(time); if (is_old) { auto erase_itr = itr; --itr; @@ -252,6 +420,51 @@ void RadarObjectTrackerNode::checkTrackerLifeCycle( } } +// remove objects by lanelet information +void RadarObjectTrackerNode::mapBasedNoiseFilter( + std::list> & list_tracker, const rclcpp::Time & time) +{ + for (auto itr = list_tracker.begin(); itr != list_tracker.end(); ++itr) { + autoware_auto_perception_msgs::msg::TrackedObject object; + (*itr)->getTrackedObject(time, object); + const auto closest_lanelets = getClosestValidLanelets( + object, lanelet_map_ptr_, max_distance_from_lane_, max_angle_diff_from_lane_); + + // 1. If the object is not close to any lanelet, delete the tracker + const bool no_closest_lanelet = closest_lanelets.empty(); + // 2. If the object velocity direction is not close to the lanelet direction, delete the tracker + const bool is_velocity_direction_close_to_lanelet = + hasValidVelocityDirectionToLanelet(object, closest_lanelets, max_lateral_velocity_); + if (no_closest_lanelet || !is_velocity_direction_close_to_lanelet) { + // std::cout << "object removed due to map based noise filter" << " no close lanelet: " << + // no_closest_lanelet << " velocity direction flag:" << is_velocity_direction_close_to_lanelet + // << std::endl; + itr = list_tracker.erase(itr); + --itr; + } + } +} + +// remove objects by distance +void RadarObjectTrackerNode::distanceBasedNoiseFilter( + std::list> & list_tracker, const rclcpp::Time & time, + const geometry_msgs::msg::Transform & self_transform) +{ + // remove objects that are too close + for (auto itr = list_tracker.begin(); itr != list_tracker.end(); ++itr) { + autoware_auto_perception_msgs::msg::TrackedObject object; + (*itr)->getTrackedObject(time, object); + const double distance = std::hypot( + object.kinematics.pose_with_covariance.pose.position.x - self_transform.translation.x, + object.kinematics.pose_with_covariance.pose.position.y - self_transform.translation.y); + if (distance < minimum_range_threshold_) { + // std::cout << "object removed due to small distance. 
distance: " << distance << std::endl; + itr = list_tracker.erase(itr); + --itr; + } + } +} + void RadarObjectTrackerNode::sanitizeTracker( std::list> & list_tracker, const rclcpp::Time & time) { @@ -322,8 +535,7 @@ void RadarObjectTrackerNode::sanitizeTracker( inline bool RadarObjectTrackerNode::shouldTrackerPublish( const std::shared_ptr tracker) const { - constexpr int measurement_count_threshold = 3; - if (tracker->getTotalMeasurementCount() < measurement_count_threshold) { + if (tracker->getTotalMeasurementCount() < measurement_count_threshold_) { return false; } return true; diff --git a/perception/radar_object_tracker/src/tracker/model/constant_turn_rate_motion_tracker.cpp b/perception/radar_object_tracker/src/tracker/model/constant_turn_rate_motion_tracker.cpp new file mode 100644 index 0000000000000..5815fe34a68a9 --- /dev/null +++ b/perception/radar_object_tracker/src/tracker/model/constant_turn_rate_motion_tracker.cpp @@ -0,0 +1,624 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// Author: v1.0 Yukihiro Saito +// + +#include "radar_object_tracker/tracker/model/constant_turn_rate_motion_tracker.hpp" + +#include "radar_object_tracker/utils/utils.hpp" + +#include +#include + +#include +#include +#include +#include + +#ifdef ROS_DISTRO_GALACTIC +#include +#else +#include +#endif + +#define EIGEN_MPL2_ONLY +#include +#include +#include +#include + +#include + +using Label = autoware_auto_perception_msgs::msg::ObjectClassification; + +// init static member variables +bool ConstantTurnRateMotionTracker::is_initialized_ = false; +ConstantTurnRateMotionTracker::EkfParams ConstantTurnRateMotionTracker::ekf_params_; +double ConstantTurnRateMotionTracker::max_vx_; +double ConstantTurnRateMotionTracker::filter_tau_; +double ConstantTurnRateMotionTracker::filter_dt_; +bool ConstantTurnRateMotionTracker::assume_zero_yaw_rate_; +bool ConstantTurnRateMotionTracker::trust_yaw_input_; +bool ConstantTurnRateMotionTracker::trust_twist_input_; +bool ConstantTurnRateMotionTracker::use_polar_coordinate_in_measurement_noise_; + +ConstantTurnRateMotionTracker::ConstantTurnRateMotionTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object, + const std::string & tracker_param_file, const std::uint8_t & /*label*/) +: Tracker(time, object.classification), + logger_(rclcpp::get_logger("ConstantTurnRateMotionTracker")), + last_update_time_(time), + z_(object.kinematics.pose_with_covariance.pose.position.z) +{ + object_ = object; + + // load setting from yaml file + if (!is_initialized_) { + loadDefaultModelParameters(tracker_param_file); // currently not using label + is_initialized_ = true; + } + + // shape initialization + bounding_box_ = {0.5, 0.5, 1.7}; + cylinder_ = {0.3, 1.7}; + + // initialize X matrix and position + Eigen::MatrixXd X(ekf_params_.dim_x, 1); + X(IDX::X) = object.kinematics.pose_with_covariance.pose.position.x; + X(IDX::Y) = object.kinematics.pose_with_covariance.pose.position.y; + const 
auto yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + z_ = object.kinematics.pose_with_covariance.pose.position.z; + X(IDX::YAW) = yaw; + // radar object usually have twist + if (object.kinematics.has_twist) { + const auto v = object.kinematics.twist_with_covariance.twist.linear.x; + X(IDX::VX) = v; + } else { + X(IDX::VX) = 0.0; + } + // init turn rate + X(IDX::WZ) = 0.0; + + // initialize P matrix + Eigen::MatrixXd P = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + + // create rotation matrix to rotate covariance matrix + const double cos_yaw = std::cos(yaw); + const double sin_yaw = std::sin(yaw); + // 2d rotation matrix + Eigen::Matrix2d R; + R << cos_yaw, -sin_yaw, sin_yaw, cos_yaw; + + // covariance matrix in the target vehicle coordinate system + Eigen::Matrix2d P_xy_local; + P_xy_local << ekf_params_.p0_cov_x, 0.0, 0.0, ekf_params_.p0_cov_y; + + // Rotated covariance matrix + // covariance is rotated by 2D rotation matrix R + // P′=R P RT + Eigen::Matrix2d P_xy, P_v_xy; + + // Rotate the covariance matrix according to the vehicle yaw + // because p0_cov_x and y are in the vehicle coordinate system. + if (object.kinematics.has_position_covariance) { + P_xy_local << object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X], + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y], + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X], + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + P_xy = R * P_xy_local * R.transpose(); + } else { + // rotate + P_xy = R * P_xy_local * R.transpose(); + } + // put value in P matrix + P.block<2, 2>(IDX::X, IDX::X) = P_xy; + + // covariance often written in object frame + if (object.kinematics.has_twist_covariance) { + const auto vx_cov = object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + // const auto vy_cov = + // object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + P(IDX::VX, IDX::VX) = vx_cov; + } else { + P(IDX::VX, IDX::VX) = ekf_params_.p0_cov_vx; + } + + P(IDX::YAW, IDX::YAW) = ekf_params_.p0_cov_yaw; + P(IDX::WZ, IDX::WZ) = ekf_params_.p0_cov_wz; + + // init shape + if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) { + bounding_box_ = { + object.shape.dimensions.x, object.shape.dimensions.y, object.shape.dimensions.z}; + } else if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::CYLINDER) { + cylinder_ = {object.shape.dimensions.x, object.shape.dimensions.z}; + } + + ekf_.init(X, P); +} + +void ConstantTurnRateMotionTracker::loadDefaultModelParameters(const std::string & path) +{ + YAML::Node config = YAML::LoadFile(path); + // initialize ekf params + const float q_stddev_x = + config["default"]["ekf_params"]["process_noise_std"]["x"].as(); // [m] + const float q_stddev_y = + config["default"]["ekf_params"]["process_noise_std"]["y"].as(); // [m] + const float q_stddev_yaw = + config["default"]["ekf_params"]["process_noise_std"]["yaw"].as(); // [rad] + const float q_stddev_vx = + config["default"]["ekf_params"]["process_noise_std"]["vx"].as(); // [m/s] + const float q_stddev_wz = + config["default"]["ekf_params"]["process_noise_std"]["wz"].as(); // [m/s] + const float r_stddev_x = + config["default"]["ekf_params"]["measurement_noise_std"]["x"].as(); // [m] + const float r_stddev_y = + config["default"]["ekf_params"]["measurement_noise_std"]["y"].as(); // [m] + const float r_stddev_yaw = + 
config["default"]["ekf_params"]["measurement_noise_std"]["yaw"].as(); // [rad] + const float r_stddev_vx = + config["default"]["ekf_params"]["measurement_noise_std"]["vx"].as(); // [m/s] + const float p0_stddev_x = + config["default"]["ekf_params"]["initial_covariance_std"]["x"].as(); // [m/s] + const float p0_stddev_y = + config["default"]["ekf_params"]["initial_covariance_std"]["y"].as(); // [m/s] + const float p0_stddev_yaw = + config["default"]["ekf_params"]["initial_covariance_std"]["yaw"].as(); // [rad] + const float p0_stddev_vx = + config["default"]["ekf_params"]["initial_covariance_std"]["vx"].as(); // [m/(s)] + const float p0_stddev_wz = + config["default"]["ekf_params"]["initial_covariance_std"]["wz"].as(); // [rad/s] + assume_zero_yaw_rate_ = config["default"]["assume_zero_yaw_rate"].as(); // default false + trust_yaw_input_ = config["default"]["trust_yaw_input"].as(); // default false + trust_twist_input_ = config["default"]["trust_twist_input"].as(); // default false + use_polar_coordinate_in_measurement_noise_ = + config["default"]["use_polar_coordinate_in_measurement_noise"].as(); // default false + ekf_params_.q_cov_x = std::pow(q_stddev_x, 2.0); + ekf_params_.q_cov_y = std::pow(q_stddev_y, 2.0); + ekf_params_.q_cov_yaw = std::pow(q_stddev_yaw, 2.0); + ekf_params_.q_cov_vx = std::pow(q_stddev_vx, 2.0); + ekf_params_.q_cov_wz = std::pow(q_stddev_wz, 2.0); + ekf_params_.r_cov_x = std::pow(r_stddev_x, 2.0); + ekf_params_.r_cov_y = std::pow(r_stddev_y, 2.0); + ekf_params_.r_cov_yaw = std::pow(r_stddev_yaw, 2.0); + ekf_params_.r_cov_vx = std::pow(r_stddev_vx, 2.0); + ekf_params_.p0_cov_x = std::pow(p0_stddev_x, 2.0); + ekf_params_.p0_cov_y = std::pow(p0_stddev_y, 2.0); + ekf_params_.p0_cov_yaw = std::pow(p0_stddev_yaw, 2.0); + ekf_params_.p0_cov_vx = std::pow(p0_stddev_vx, 2.0); + ekf_params_.p0_cov_wz = std::pow(p0_stddev_wz, 2.0); + + // lpf filter parameters + filter_tau_ = config["default"]["low_pass_filter"]["time_constant"].as(); // [s] + filter_dt_ = config["default"]["low_pass_filter"]["sampling_time"].as(); // [s] + + // limitation + // (TODO): this may be written in another yaml file based on classify result + const float max_speed_kmph = config["default"]["limit"]["max_speed"].as(); // [km/h] + max_vx_ = tier4_autoware_utils::kmph2mps(max_speed_kmph); // [m/s] +} + +bool ConstantTurnRateMotionTracker::predict(const rclcpp::Time & time) +{ + const double dt = (time - last_update_time_).seconds(); + bool ret = predict(dt, ekf_); + if (ret) { + last_update_time_ = time; + } + return ret; +} + +bool ConstantTurnRateMotionTracker::predict(const double dt, KalmanFilter & ekf) const +{ + /* == Nonlinear model == + * + * x_{k+1} = x_k + vx_k * cos(yaw_k) * dt + * y_{k+1} = y_k + vx_k * sin(yaw_k) * dt + * yaw_{k+1} = yaw_k + (wz_k) * dt + * vx_{k+1} = vx_k + * wz_{k+1} = wz_k + * + */ + + /* == Linearized model == + * + * A = [ 1, 0, -vx*sin(yaw)*dt, cos(yaw)*dt, 0] + * [ 0, 1, vx*cos(yaw)*dt, sin(yaw)*dt, 0] + * [ 0, 0, 1, 0, dt] + * [ 0, 0, 0, 1, 0] + * [ 0, 0, 0, 0, 1] + */ + + const double yaw_rate_coeff = assume_zero_yaw_rate_ ? 
0.0 : 1.0; + + // X t + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); // predicted state + ekf.getX(X_t); + const auto x = X_t(IDX::X); + const auto y = X_t(IDX::Y); + const auto yaw = X_t(IDX::YAW); + const auto vx = X_t(IDX::VX); + const auto wz = X_t(IDX::WZ); + + // X t+1 + Eigen::MatrixXd X_next_t(ekf_params_.dim_x, 1); + X_next_t(IDX::X) = x + vx * std::cos(yaw) * dt; + X_next_t(IDX::Y) = y + vx * std::sin(yaw) * dt; + X_next_t(IDX::YAW) = yaw + wz * dt * yaw_rate_coeff; + X_next_t(IDX::VX) = vx; + X_next_t(IDX::WZ) = wz * yaw_rate_coeff; + + // A: state transition matrix + Eigen::MatrixXd A = Eigen::MatrixXd::Identity(ekf_params_.dim_x, ekf_params_.dim_x); + A(IDX::X, IDX::YAW) = -vx * std::sin(yaw) * dt; + A(IDX::Y, IDX::YAW) = vx * std::cos(yaw) * dt; + A(IDX::X, IDX::VX) = std::cos(yaw) * dt; + A(IDX::Y, IDX::VX) = std::sin(yaw) * dt; + A(IDX::YAW, IDX::WZ) = dt * yaw_rate_coeff; + + // Q: system noise + Eigen::MatrixXd Q = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + + // Rotate the covariance matrix according to the vehicle yaw + // because q_cov_x and y are in the vehicle coordinate system. + Eigen::MatrixXd Q_xy_local = Eigen::MatrixXd::Zero(2, 2); + Eigen::MatrixXd Q_xy_global = Eigen::MatrixXd::Zero(2, 2); + Q_xy_local << ekf_params_.q_cov_x, 0.0, 0.0, ekf_params_.q_cov_y; + Eigen::MatrixXd R = Eigen::MatrixXd::Zero(2, 2); + R << cos(yaw), -sin(yaw), sin(yaw), cos(yaw); + Q_xy_global = R * Q_xy_local * R.transpose(); + Q.block<2, 2>(IDX::X, IDX::X) = Q_xy_global; + + Q(IDX::YAW, IDX::YAW) = ekf_params_.q_cov_yaw; + Q(IDX::VX, IDX::VX) = ekf_params_.q_cov_vx; + Q(IDX::WZ, IDX::WZ) = ekf_params_.q_cov_wz; + + // may be unused + Eigen::MatrixXd B = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + Eigen::MatrixXd u = Eigen::MatrixXd::Zero(ekf_params_.dim_x, 1); + + // call kalman filter library + if (!ekf.predict(X_next_t, A, Q)) { + RCLCPP_WARN(logger_, "Cannot predict"); + } + + return true; +} + +bool ConstantTurnRateMotionTracker::measureWithPose( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const geometry_msgs::msg::Transform & self_transform) +{ + // Observation pattern will be: + // 1. x, y, vx, vy + // 2. x, y + // 3. 
vx, vy (Do not consider this right now) + // + // We handle this measurements by stacking observation matrix, measurement vector and measurement + // covariance + // - observation matrix: C + // - measurement vector : Y + // - measurement covariance: R + + // get current state + Eigen::MatrixXd X(ekf_params_.dim_x, 1); + ekf_.getX(X); + const auto yaw_state = X(IDX::YAW); + + // rotation matrix + Eigen::Matrix2d RotationYaw; + if (trust_yaw_input_) { + const auto yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + RotationYaw << std::cos(yaw), -std::sin(yaw), std::sin(yaw), std::cos(yaw); + } else { + RotationYaw << std::cos(yaw_state), -std::sin(yaw_state), std::sin(yaw_state), + std::cos(yaw_state); + } + const auto base_link_yaw = tf2::getYaw(self_transform.rotation); + Eigen::Matrix2d RotationBaseLink; + RotationBaseLink << std::cos(base_link_yaw), -std::sin(base_link_yaw), std::sin(base_link_yaw), + std::cos(base_link_yaw); + + // depth from base_link to object + const auto dx = + object.kinematics.pose_with_covariance.pose.position.x - self_transform.translation.x; + const auto dy = + object.kinematics.pose_with_covariance.pose.position.y - self_transform.translation.y; + Eigen::Vector2d pose_diff_in_map(dx, dy); + Eigen::Vector2d pose_diff_in_base_link = RotationBaseLink * pose_diff_in_map; + const auto depth = abs(pose_diff_in_base_link(0)); + + // gather matrices as vector + std::vector C_list; + std::vector Y_list; + std::vector R_block_list; + + // 1. add position measurement + const bool enable_position_measurement = true; // assume position is always measured + if (enable_position_measurement) { + Eigen::MatrixXd Cxy = Eigen::MatrixXd::Zero(2, ekf_params_.dim_x); + Cxy(0, IDX::X) = 1; + Cxy(1, IDX::Y) = 1; + C_list.push_back(Cxy); + + Eigen::MatrixXd Yxy = Eigen::MatrixXd::Zero(2, 1); + Yxy << object.kinematics.pose_with_covariance.pose.position.x, + object.kinematics.pose_with_covariance.pose.position.y; + Y_list.push_back(Yxy); + + // covariance need to be rotated since it is in the vehicle coordinate system + Eigen::MatrixXd Rxy_local = Eigen::MatrixXd::Zero(2, 2); + Eigen::MatrixXd Rxy = Eigen::MatrixXd::Zero(2, 2); + if (!object.kinematics.has_position_covariance) { + // switch noise covariance in polar coordinate or cartesian coordinate + const auto r_cov_y = use_polar_coordinate_in_measurement_noise_ + ? depth * depth * ekf_params_.r_cov_x + : ekf_params_.r_cov_y; + Rxy_local << ekf_params_.r_cov_x, 0, 0, r_cov_y; // xy in base_link coordinate + Rxy = RotationBaseLink * Rxy_local * RotationBaseLink.transpose(); + } else { + Rxy_local << object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X], + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y], + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X], + object.kinematics.pose_with_covariance + .covariance[utils::MSG_COV_IDX::Y_Y]; // xy in vehicle coordinate + Rxy = RotationYaw * Rxy_local * RotationYaw.transpose(); + } + R_block_list.push_back(Rxy); + } + + // 2. 
add yaw measurement
+  const bool object_has_orientation = object.kinematics.orientation_availability >
+                                      0;  // 0: not available, 1: sign unknown, 2: available
+  const bool enable_yaw_measurement = trust_yaw_input_ && object_has_orientation;
+
+  if (enable_yaw_measurement) {
+    Eigen::MatrixXd Cyaw = Eigen::MatrixXd::Zero(1, ekf_params_.dim_x);
+    Cyaw(0, IDX::YAW) = 1;
+    C_list.push_back(Cyaw);
+
+    Eigen::MatrixXd Yyaw = Eigen::MatrixXd::Zero(1, 1);
+    const auto yaw = [&] {
+      auto obj_yaw = tier4_autoware_utils::normalizeRadian(
+        tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation));
+      while (M_PI_2 <= yaw_state - obj_yaw) {
+        obj_yaw = obj_yaw + M_PI;
+      }
+      while (M_PI_2 <= obj_yaw - yaw_state) {
+        obj_yaw = obj_yaw - M_PI;
+      }
+      return obj_yaw;
+    }();
+
+    Yyaw << yaw;
+    Y_list.push_back(Yyaw);
+
+    Eigen::MatrixXd Ryaw = Eigen::MatrixXd::Zero(1, 1);
+    Ryaw << ekf_params_.r_cov_yaw;
+    R_block_list.push_back(Ryaw);
+  }
+
+  // 3. add linear velocity measurement
+  const bool enable_velocity_measurement = object.kinematics.has_twist && trust_twist_input_;
+  if (enable_velocity_measurement) {
+    Eigen::MatrixXd C_vx = Eigen::MatrixXd::Zero(1, ekf_params_.dim_x);
+    C_vx(0, IDX::VX) = 1;
+    C_list.push_back(C_vx);
+
+    // measure absolute velocity
+    Eigen::MatrixXd Vx = Eigen::MatrixXd::Zero(1, 1);
+    Vx << object.kinematics.twist_with_covariance.twist.linear.x;
+
+    Eigen::MatrixXd R_vx = Eigen::MatrixXd::Zero(1, 1);
+    if (!object.kinematics.has_twist_covariance) {
+      R_vx << ekf_params_.r_cov_vx;
+    } else {
+      R_vx << object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X];
+    }
+    R_block_list.push_back(R_vx);
+  }
+
+  // 4. sum up matrices
+  const auto row_number = std::accumulate(
+    C_list.begin(), C_list.end(), 0,
+    [](const auto & sum, const auto & C) { return sum + C.rows(); });
+  if (row_number == 0) {
+    RCLCPP_WARN(logger_, "No measurement is available");
+    return false;
+  }
+
+  // stacking matrices vertically or diagonally
+  const auto C = utils::stackMatricesVertically(C_list);
+  const auto Y = utils::stackMatricesVertically(Y_list);
+  const auto R = utils::stackMatricesDiagonally(R_block_list);
+
+  // 5. EKF update
+  if (!ekf_.update(Y, C, R)) {
+    RCLCPP_WARN(logger_, "Cannot update");
+  }
+
+  // 6. normalize: limit vx
+  {
+    Eigen::MatrixXd X_t(ekf_params_.dim_x, 1);
+    Eigen::MatrixXd P_t(ekf_params_.dim_x, ekf_params_.dim_x);
+    ekf_.getX(X_t);
+    ekf_.getP(P_t);
+    if (!(-max_vx_ <= X_t(IDX::VX) && X_t(IDX::VX) <= max_vx_)) {
+      X_t(IDX::VX) = X_t(IDX::VX) < 0 ? -max_vx_ : max_vx_;
+    }
+    ekf_.init(X_t, P_t);
+  }
+
+  // 7. filter z
+  // first order low pass filter
+  const float gain = filter_tau_ / (filter_tau_ + filter_dt_);
+  z_ = gain * z_ + (1.0 - gain) * object.kinematics.pose_with_covariance.pose.position.z;
+  return true;
+}
+
+bool ConstantTurnRateMotionTracker::measureWithShape(
+  const autoware_auto_perception_msgs::msg::DetectedObject & object)
+{
+  // just use first order low pass filter
+  const float gain = filter_tau_ / (filter_tau_ + filter_dt_);
+
+  if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) {
+    bounding_box_.length = gain * bounding_box_.length + (1.0 - gain) * object.shape.dimensions.x;
+    bounding_box_.width = gain * bounding_box_.width + (1.0 - gain) * object.shape.dimensions.y;
+    bounding_box_.height = gain * bounding_box_.height + (1.0 - gain) * object.shape.dimensions.z;
+  } else if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::CYLINDER) {
+    cylinder_.width = gain * cylinder_.width + (1.0 - gain) * object.shape.dimensions.x;
+    cylinder_.height = gain * cylinder_.height + (1.0 - gain) * object.shape.dimensions.z;
+  } else {
+    return false;
+  }
+
+  return true;
+}
+
+bool ConstantTurnRateMotionTracker::measure(
+  const autoware_auto_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time,
+  const geometry_msgs::msg::Transform & self_transform)
+{
+  const auto & current_classification = getClassification();
+  object_ = object;
+  if (object_recognition_utils::getHighestProbLabel(object.classification) == Label::UNKNOWN) {
+    setClassification(current_classification);
+  }
+
+  if (0.01 /*10msec*/ < std::fabs((time - last_update_time_).seconds())) {
+    RCLCPP_WARN(
+      logger_, "There is a large gap between predicted time and measurement time. (%f)",
+      (time - last_update_time_).seconds());
+  }
+
+  measureWithPose(object, self_transform);
+  measureWithShape(object);
+
+  return true;
+}
+
+bool ConstantTurnRateMotionTracker::getTrackedObject(
+  const rclcpp::Time & time, autoware_auto_perception_msgs::msg::TrackedObject & object) const
+{
+  object = object_recognition_utils::toTrackedObject(object_);
+  object.object_id = getUUID();
+  object.classification = getClassification();
+
+  // predict kinematics
+  KalmanFilter tmp_ekf_for_no_update = ekf_;
+  const double dt = (time - last_update_time_).seconds();
+  if (0.001 /*1msec*/ < dt) {
+    predict(dt, tmp_ekf_for_no_update);
+  }
+  Eigen::MatrixXd X_t(ekf_params_.dim_x, 1);                // predicted state
+  Eigen::MatrixXd P(ekf_params_.dim_x, ekf_params_.dim_x);  // predicted covariance
+  tmp_ekf_for_no_update.getX(X_t);
+  tmp_ekf_for_no_update.getP(P);
+
+  auto & pose_with_cov = object.kinematics.pose_with_covariance;
+  auto & twist_with_cov = object.kinematics.twist_with_covariance;
+  // rotation matrix built from the estimated yaw
+  Eigen::Matrix2d R_yaw = Eigen::Matrix2d::Zero();
+  R_yaw << std::cos(X_t(IDX::YAW)), -std::sin(X_t(IDX::YAW)), std::sin(X_t(IDX::YAW)),
+    std::cos(X_t(IDX::YAW));
+  const Eigen::Matrix2d R_yaw_inv = R_yaw.transpose();
+
+  // position
+  pose_with_cov.pose.position.x = X_t(IDX::X);
+  pose_with_cov.pose.position.y = X_t(IDX::Y);
+  pose_with_cov.pose.position.z = z_;
+  // quaternion
+  {
+    double roll, pitch, yaw;
+    tf2::Quaternion original_quaternion;
+    tf2::fromMsg(object_.kinematics.pose_with_covariance.pose.orientation, original_quaternion);
+    tf2::Matrix3x3(original_quaternion).getRPY(roll, pitch, yaw);
+    tf2::Quaternion filtered_quaternion;
+    filtered_quaternion.setRPY(roll, pitch, X_t(IDX::YAW));
+    pose_with_cov.pose.orientation.x = filtered_quaternion.x();
+    pose_with_cov.pose.orientation.y = filtered_quaternion.y();
+    pose_with_cov.pose.orientation.z = filtered_quaternion.z();
+    pose_with_cov.pose.orientation.w = filtered_quaternion.w();
+    if (trust_yaw_input_) {
+      object.kinematics.orientation_availability =
+        autoware_auto_perception_msgs::msg::TrackedObjectKinematics::SIGN_UNKNOWN;
+    } else {
+      object.kinematics.orientation_availability =
+        autoware_auto_perception_msgs::msg::TrackedObjectKinematics::UNAVAILABLE;
+    }
+  }
+  // twist
+  // the twist needs to be converted to the object's local coordinate frame
+  const auto vx = X_t(IDX::VX);
+  twist_with_cov.twist.linear.x = vx;
+
+  // ===== covariance transformation =====
+  // since covariance in EKF is in map coordinate and output should be in object coordinate,
+  // we need to transform covariance
+  Eigen::Matrix2d P_xy_map;
+  P_xy_map << P(IDX::X, IDX::X), P(IDX::X, IDX::Y), P(IDX::Y, IDX::X), P(IDX::Y, IDX::Y);
+
+  // rotate covariance with -yaw
+  Eigen::Matrix2d P_xy = R_yaw_inv * P_xy_map * R_yaw_inv.transpose();
+
+  // position covariance
+  constexpr double no_info_cov = 1e9;    // no information
+  constexpr double z_cov = 1. * 1.;      // TODO(yukkysaito) Currently tentative
+  constexpr double yaw_cov = 0.1 * 0.1;  // TODO(yukkysaito) Currently tentative
+
+  pose_with_cov.covariance[utils::MSG_COV_IDX::X_X] = P_xy(IDX::X, IDX::X);
+  pose_with_cov.covariance[utils::MSG_COV_IDX::X_Y] = P_xy(IDX::X, IDX::Y);
+  pose_with_cov.covariance[utils::MSG_COV_IDX::Y_X] = P_xy(IDX::Y, IDX::X);
+  pose_with_cov.covariance[utils::MSG_COV_IDX::Y_Y] = P_xy(IDX::Y, IDX::Y);
+  pose_with_cov.covariance[utils::MSG_COV_IDX::Z_Z] = z_cov;
+  pose_with_cov.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = no_info_cov;
+  pose_with_cov.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = no_info_cov;
+  pose_with_cov.covariance[utils::MSG_COV_IDX::YAW_YAW] = yaw_cov;
+
+  // twist covariance
+  constexpr double vz_cov = 0.2 * 0.2;  // TODO(Yoshi Ri) Currently tentative
+  constexpr double wz_cov = 0.2 * 0.2;  // TODO(yukkysaito) Currently tentative
+
+  twist_with_cov.covariance[utils::MSG_COV_IDX::X_X] = P(IDX::VX, IDX::VX);
+  twist_with_cov.covariance[utils::MSG_COV_IDX::X_Y] = 0.0;
+  twist_with_cov.covariance[utils::MSG_COV_IDX::Y_X] = 0.0;
+  twist_with_cov.covariance[utils::MSG_COV_IDX::Y_Y] = no_info_cov;
+  twist_with_cov.covariance[utils::MSG_COV_IDX::Z_Z] = vz_cov;
+  twist_with_cov.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = no_info_cov;
+  twist_with_cov.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = no_info_cov;
+  twist_with_cov.covariance[utils::MSG_COV_IDX::YAW_YAW] = wz_cov;
+
+  // set shape
+  if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) {
+    object.shape.dimensions.x = bounding_box_.length;
+    object.shape.dimensions.y = bounding_box_.width;
+    object.shape.dimensions.z = bounding_box_.height;
+  } else if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::CYLINDER) {
+    object.shape.dimensions.x = cylinder_.width;
+    object.shape.dimensions.y = cylinder_.width;
+    object.shape.dimensions.z = cylinder_.height;
+  } else if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::POLYGON) {
+    const auto origin_yaw = tf2::getYaw(object_.kinematics.pose_with_covariance.pose.orientation);
+    const auto ekf_pose_yaw = tf2::getYaw(pose_with_cov.pose.orientation);
+    object.shape.footprint =
+      tier4_autoware_utils::rotatePolygon(object.shape.footprint, origin_yaw - ekf_pose_yaw);
+  }
+
+  return true;
+}
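Note on the measurement update above: the tracker assembles a variable-size observation by collecting per-quantity blocks (position, yaw, velocity) into `C_list` / `Y_list` / `R_block_list`, stacking the C and Y blocks vertically and the noise blocks block-diagonally, and then running a single EKF update. The helpers `utils::stackMatricesVertically` and `utils::stackMatricesDiagonally` are defined elsewhere in this package; the sketch below (hypothetical `stackVertically` / `stackDiagonally` functions, not the package implementation) only illustrates the expected stacking behavior, assuming every block is an `Eigen::MatrixXd` and all C blocks share the same column count (`dim_x`).

```cpp
// Illustration only: plausible behavior of the stacking helpers used above.
#include <Eigen/Dense>
#include <vector>

// Stack blocks on top of each other; assumes every block has the same column count.
Eigen::MatrixXd stackVertically(const std::vector<Eigen::MatrixXd> & blocks)
{
  Eigen::Index rows = 0;
  const Eigen::Index cols = blocks.empty() ? 0 : blocks.front().cols();
  for (const auto & b : blocks) {
    rows += b.rows();
  }

  Eigen::MatrixXd stacked(rows, cols);
  Eigen::Index offset = 0;
  for (const auto & b : blocks) {
    stacked.middleRows(offset, b.rows()) = b;
    offset += b.rows();
  }
  return stacked;
}

// Place blocks along the diagonal, zeros elsewhere (block-diagonal noise covariance).
Eigen::MatrixXd stackDiagonally(const std::vector<Eigen::MatrixXd> & blocks)
{
  Eigen::Index rows = 0;
  Eigen::Index cols = 0;
  for (const auto & b : blocks) {
    rows += b.rows();
    cols += b.cols();
  }

  Eigen::MatrixXd stacked = Eigen::MatrixXd::Zero(rows, cols);
  Eigen::Index row = 0;
  Eigen::Index col = 0;
  for (const auto & b : blocks) {
    stacked.block(row, col, b.rows(), b.cols()) = b;
    row += b.rows();
    col += b.cols();
  }
  return stacked;
}
```

With a 2-row position block, a 1-row yaw block, and a 1-row velocity block, this yields a 4-row observation matrix and a 4x4 block-diagonal noise matrix for one EKF update.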
diff --git a/perception/radar_object_tracker/src/tracker/model/linear_motion_tracker.cpp b/perception/radar_object_tracker/src/tracker/model/linear_motion_tracker.cpp
index 62880d9471047..7bf54fbd094e2 100644
--- a/perception/radar_object_tracker/src/tracker/model/linear_motion_tracker.cpp
+++ b/perception/radar_object_tracker/src/tracker/model/linear_motion_tracker.cpp
@@ -43,9 +43,21 @@ using Label = autoware_auto_perception_msgs::msg::ObjectClassification;
 
+// initialize static parameter
+bool LinearMotionTracker::is_initialized_ = false;
+LinearMotionTracker::EkfParams LinearMotionTracker::ekf_params_;
+double LinearMotionTracker::max_vx_;
+double LinearMotionTracker::max_vy_;
+double LinearMotionTracker::filter_tau_;
+double LinearMotionTracker::filter_dt_;
+bool LinearMotionTracker::estimate_acc_;
+bool LinearMotionTracker::trust_yaw_input_;
+bool LinearMotionTracker::trust_twist_input_;
+bool LinearMotionTracker::use_polar_coordinate_in_measurement_noise_;
+
 LinearMotionTracker::LinearMotionTracker(
   const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object,
-  const std::string & /*tracker_param_file*/, const std::uint8_t & /*label*/)
+  const std::string & tracker_param_file, const std::uint8_t & /*label*/)
 : Tracker(time, object.classification),
   logger_(rclcpp::get_logger("LinearMotionTracker")),
   last_update_time_(time),
@@ -54,11 +66,13 @@ LinearMotionTracker::LinearMotionTracker(
   object_ = object;
 
   // load setting from yaml file
-  const std::string default_setting_file =
-    ament_index_cpp::get_package_share_directory("radar_object_tracker") +
-    "/config/tracking/linear_motion_tracker.yaml";
-  loadDefaultModelParameters(default_setting_file);
-  // loadModelParameters(tracker_param_file, label);
+  if (!is_initialized_) {
+    loadDefaultModelParameters(tracker_param_file);
+    is_initialized_ = true;
+  }
+  // shape initialization
+  bounding_box_ = {0.5, 0.5, 1.7};
+  cylinder_ = {0.3, 1.7};
 
   // initialize X matrix and position
   Eigen::MatrixXd X(ekf_params_.dim_x, 1);
@@ -192,6 +206,12 @@ void LinearMotionTracker::loadDefaultModelParameters(const std::string & path)
     config["default"]["ekf_params"]["initial_covariance_std"]["ax"].as<float>();  // [m/(s*s)]
   const float p0_stddev_ay =
     config["default"]["ekf_params"]["initial_covariance_std"]["ay"].as<float>();  // [m/(s*s)]
+  estimate_acc_ = config["default"]["ekf_params"]["estimate_acc"].as<bool>();
+  trust_yaw_input_ = config["default"]["trust_yaw_input"].as<bool>(false);      // default false
+  trust_twist_input_ = config["default"]["trust_twist_input"].as<bool>(false);  // default false
+  use_polar_coordinate_in_measurement_noise_ =
+    config["default"]["use_polar_coordinate_in_measurement_noise"].as<bool>(
+      false);  // default false
   ekf_params_.q_cov_ax = std::pow(q_stddev_ax, 2.0);
   ekf_params_.q_cov_ay = std::pow(q_stddev_ay, 2.0);
   ekf_params_.q_cov_vx = std::pow(q_stddev_vx, 2.0);
@@ -218,11 +238,6 @@ void LinearMotionTracker::loadDefaultModelParameters(const std::string & path)
   const float max_speed_kmph = config["default"]["limit"]["max_speed"].as<float>();  // [km/h]
   max_vx_ = tier4_autoware_utils::kmph2mps(max_speed_kmph);  // [m/s]
   max_vy_ = tier4_autoware_utils::kmph2mps(max_speed_kmph);  // [m/s]
-
-  // shape initialization
-  // (TODO): this may be written in another yaml file based on classify result
-  bounding_box_ = {0.5, 0.5, 1.7};
-  cylinder_ = {0.3, 1.7};
 }
 
 bool LinearMotionTracker::predict(const rclcpp::Time & time)
@@ -256,6 +271,8 @@ bool LinearMotionTracker::predict(const double dt, KalmanFilter & ekf) const
    * 0, 0, 0, 0, 1, 0,
    * 0, 0, 0, 0, 0, 1]
    */
+  // estimate acc
+  const double acc_coeff = estimate_acc_ ? 1.0 : 0.0;
 
   // X t
   Eigen::MatrixXd X_t(ekf_params_.dim_x, 1);  // predicted state
@@ -269,10 +286,10 @@ bool LinearMotionTracker::predict(const double dt, KalmanFilter & ekf) const
 
   // X t+1
   Eigen::MatrixXd X_next_t(ekf_params_.dim_x, 1);
-  X_next_t(IDX::X) = x + vx * dt + 0.5 * ax * dt * dt;
-  X_next_t(IDX::Y) = y + vy * dt + 0.5 * ay * dt * dt;
-  X_next_t(IDX::VX) = vx + ax * dt;
-  X_next_t(IDX::VY) = vy + ay * dt;
+  X_next_t(IDX::X) = x + vx * dt + 0.5 * ax * dt * dt * acc_coeff;
+  X_next_t(IDX::Y) = y + vy * dt + 0.5 * ay * dt * dt * acc_coeff;
+  X_next_t(IDX::VX) = vx + ax * dt * acc_coeff;
+  X_next_t(IDX::VY) = vy + ay * dt * acc_coeff;
   X_next_t(IDX::AX) = ax;
   X_next_t(IDX::AY) = ay;
 
@@ -280,10 +297,10 @@ bool LinearMotionTracker::predict(const double dt, KalmanFilter & ekf) const
   Eigen::MatrixXd A = Eigen::MatrixXd::Identity(ekf_params_.dim_x, ekf_params_.dim_x);
   A(IDX::X, IDX::VX) = dt;
   A(IDX::Y, IDX::VY) = dt;
-  A(IDX::X, IDX::AX) = 0.5 * dt * dt;
-  A(IDX::Y, IDX::AY) = 0.5 * dt * dt;
-  A(IDX::VX, IDX::AX) = dt;
-  A(IDX::VY, IDX::AY) = dt;
+  A(IDX::X, IDX::AX) = 0.5 * dt * dt * acc_coeff;
+  A(IDX::Y, IDX::AY) = 0.5 * dt * dt * acc_coeff;
+  A(IDX::VX, IDX::AX) = dt * acc_coeff;
+  A(IDX::VY, IDX::AY) = dt * acc_coeff;
 
   // Q: system noise
   Eigen::MatrixXd Q = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x);
@@ -332,7 +349,8 @@ bool LinearMotionTracker::predict(const double dt, KalmanFilter & ekf) const
 }
 
 bool LinearMotionTracker::measureWithPose(
-  const autoware_auto_perception_msgs::msg::DetectedObject & object)
+  const autoware_auto_perception_msgs::msg::DetectedObject & object,
+  const geometry_msgs::msg::Transform & self_transform)
 {
   // Observation pattern will be:
   // 1. x, y, vx, vy
@@ -347,8 +365,25 @@ bool LinearMotionTracker::measureWithPose(
 
   // rotation matrix
   Eigen::Matrix2d RotationYaw;
-  const auto yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation);
-  RotationYaw << std::cos(yaw), -std::sin(yaw), std::sin(yaw), std::cos(yaw);
+  if (trust_yaw_input_) {
+    const auto yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation);
+    RotationYaw << std::cos(yaw), -std::sin(yaw), std::sin(yaw), std::cos(yaw);
+  } else {
+    RotationYaw << std::cos(yaw_), -std::sin(yaw_), std::sin(yaw_), std::cos(yaw_);
+  }
+  const auto base_link_yaw = tf2::getYaw(self_transform.rotation);
+  Eigen::Matrix2d RotationBaseLink;
+  RotationBaseLink << std::cos(base_link_yaw), -std::sin(base_link_yaw), std::sin(base_link_yaw),
+    std::cos(base_link_yaw);
+
+  // depth from base_link to object
+  const auto dx =
+    object.kinematics.pose_with_covariance.pose.position.x - self_transform.translation.x;
+  const auto dy =
+    object.kinematics.pose_with_covariance.pose.position.y - self_transform.translation.y;
+  Eigen::Vector2d pose_diff_in_map(dx, dy);
+  Eigen::Vector2d pose_diff_in_base_link = RotationBaseLink * pose_diff_in_map;
+  const auto depth = std::abs(pose_diff_in_base_link(0));
 
   // gather matrices as vector
   std::vector<Eigen::MatrixXd> C_list;
@@ -370,21 +405,27 @@ bool LinearMotionTracker::measureWithPose(
    // covariance need to be rotated since it is in the vehicle coordinate system
    Eigen::MatrixXd Rxy_local = Eigen::MatrixXd::Zero(2, 2);
+    Eigen::MatrixXd Rxy = Eigen::MatrixXd::Zero(2, 2);
    if (!object.kinematics.has_position_covariance) {
-      Rxy_local << ekf_params_.r_cov_x, 0, 0, ekf_params_.r_cov_y;
+      // switch noise covariance between polar and cartesian coordinates
+      const auto r_cov_y = use_polar_coordinate_in_measurement_noise_
+                             ? depth * depth * ekf_params_.r_cov_x
+                             : ekf_params_.r_cov_y;
+      Rxy_local << ekf_params_.r_cov_x, 0, 0, r_cov_y;  // xy in base_link coordinate
+      Rxy = RotationBaseLink * Rxy_local * RotationBaseLink.transpose();
    } else {
      Rxy_local << object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X],
        object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y],
        object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X],
-        object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y];
+        object.kinematics.pose_with_covariance
+          .covariance[utils::MSG_COV_IDX::Y_Y];  // xy in vehicle coordinate
+      Rxy = RotationYaw * Rxy_local * RotationYaw.transpose();
    }
-    Eigen::MatrixXd Rxy = Eigen::MatrixXd::Zero(2, 2);
-    Rxy = RotationYaw * Rxy_local * RotationYaw.transpose();
    R_block_list.push_back(Rxy);
  }
 
  // 2. add linear velocity measurement
-  const bool enable_velocity_measurement = object.kinematics.has_twist;
+  const bool enable_velocity_measurement = object.kinematics.has_twist && trust_twist_input_;
  if (enable_velocity_measurement) {
    Eigen::MatrixXd C_vx_vy = Eigen::MatrixXd::Zero(2, ekf_params_.dim_x);
    C_vx_vy(0, IDX::VX) = 1;
@@ -400,14 +441,15 @@ bool LinearMotionTracker::measureWithPose(
    Y_list.push_back(Vxy);
 
    Eigen::Matrix2d R_v_xy_local = Eigen::MatrixXd::Zero(2, 2);
+    Eigen::MatrixXd R_v_xy = Eigen::MatrixXd::Zero(2, 2);
    if (!object.kinematics.has_twist_covariance) {
      R_v_xy_local << ekf_params_.r_cov_vx, 0, 0, ekf_params_.r_cov_vy;
+      R_v_xy = RotationBaseLink * R_v_xy_local * RotationBaseLink.transpose();
    } else {
      R_v_xy_local << object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X],
        0, 0, object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y];
+      R_v_xy = RotationYaw * R_v_xy_local * RotationYaw.transpose();
    }
-    Eigen::MatrixXd R_v_xy = Eigen::MatrixXd::Zero(2, 2);
-    R_v_xy = RotationYaw * R_v_xy_local * RotationYaw.transpose();
    R_block_list.push_back(R_v_xy);
  }
@@ -452,8 +494,16 @@ bool LinearMotionTracker::measureWithPose(
 
   // first order low pass filter
   const float gain = filter_tau_ / (filter_tau_ + filter_dt_);
   z_ = gain * z_ + (1.0 - gain) * object.kinematics.pose_with_covariance.pose.position.z;
-  yaw_ = gain * yaw_ + (1.0 - gain) * yaw;
-
+  // get yaw from twist atan
+  Eigen::MatrixXd X_t(ekf_params_.dim_x, 1);
+  ekf_.getX(X_t);
+  const auto twist_yaw =
+    std::atan2(X_t(IDX::VY), X_t(IDX::VX));  // calc from lateral and longitudinal velocity
+  if (trust_yaw_input_) {
+    yaw_ = gain * yaw_ + (1.0 - gain) * twist_yaw;
+  } else {
+    yaw_ = twist_yaw;
+  }
   return true;
 }
@@ -493,10 +543,10 @@ bool LinearMotionTracker::measure(
       (time - last_update_time_).seconds());
   }
 
-  measureWithPose(object);
+  measureWithPose(object, self_transform);
   measureWithShape(object);
 
-  (void)self_transform;  // currently do not use self vehicle position
+  // (void)self_transform; // currently do not use self vehicle position
   return true;
 }
@@ -542,8 +592,13 @@ bool LinearMotionTracker::getTrackedObject(
     pose_with_cov.pose.orientation.y = filtered_quaternion.y();
     pose_with_cov.pose.orientation.z = filtered_quaternion.z();
     pose_with_cov.pose.orientation.w = filtered_quaternion.w();
-    object.kinematics.orientation_availability =
-      autoware_auto_perception_msgs::msg::TrackedObjectKinematics::SIGN_UNKNOWN;
+    if (trust_yaw_input_) {
+      object.kinematics.orientation_availability =
+        autoware_auto_perception_msgs::msg::TrackedObjectKinematics::SIGN_UNKNOWN;
+    } else {
+      object.kinematics.orientation_availability =
+        autoware_auto_perception_msgs::msg::TrackedObjectKinematics::UNAVAILABLE;
+    }
   }
   // twist
   // twist need to converted to local coordinate
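For context on the `use_polar_coordinate_in_measurement_noise` option introduced above: when the detection carries no position covariance, the lateral variance is modelled as growing with the square of the longitudinal distance to the object (`r_cov_y = depth^2 * r_cov_x`), which approximates a roughly constant radar azimuth error, and the resulting base_link-aligned covariance is rotated into the map frame. The sketch below is an illustration only; the hypothetical `radarXYCovariance` helper is not code from this package.

```cpp
// Illustration only: depth-dependent radar XY measurement noise, rotated into the map frame.
#include <Eigen/Dense>
#include <cmath>

Eigen::Matrix2d radarXYCovariance(
  const double r_cov_x, const double r_cov_y_cartesian, const double depth,
  const double base_link_yaw, const bool use_polar_coordinate)
{
  // lateral variance grows with depth^2 when the polar-coordinate model is enabled
  const double r_cov_y = use_polar_coordinate ? depth * depth * r_cov_x : r_cov_y_cartesian;

  // diagonal noise in the base_link-aligned frame: range along x, azimuth-induced along y
  Eigen::Matrix2d R_local;
  R_local << r_cov_x, 0.0, 0.0, r_cov_y;

  // rotate the covariance from the base_link frame into the map frame
  Eigen::Matrix2d rotation;
  rotation << std::cos(base_link_yaw), -std::sin(base_link_yaw),
    std::sin(base_link_yaw), std::cos(base_link_yaw);
  return rotation * R_local * rotation.transpose();
}
```

As a purely illustrative example, with `r_cov_x = 0.4` and `depth = 50.0` the lateral variance becomes `50^2 * 0.4 = 1000`, reflecting how weakly a long-range radar return constrains lateral position compared to the range direction.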