diff --git a/common/autoware_universe_utils/test/src/math/test_trigonometry.cpp b/common/autoware_universe_utils/test/src/math/test_trigonometry.cpp index 5c8f96ddee67f..e43db86f2bf89 100644 --- a/common/autoware_universe_utils/test/src/math/test_trigonometry.cpp +++ b/common/autoware_universe_utils/test/src/math/test_trigonometry.cpp @@ -61,8 +61,8 @@ float normalize_angle(double angle) TEST(trigonometry, opencv_fast_atan2) { - std::random_device rd; - std::mt19937 gen(rd()); + // Default initialization uses a constant seed (std::mt19937::default_seed), which keeps this test reproducible + std::mt19937 gen{}; // Generate random x and y between -10.0 and 10.0 as float std::uniform_real_distribution<float> dis(-10.0f, 10.0f); diff --git a/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light.launch.xml b/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light.launch.xml index d4a0a8b5abdd8..3957c27dfbad8 100644 --- a/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light.launch.xml +++ b/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light.launch.xml @@ -58,7 +58,8 @@ - + + diff --git a/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light_node_container.launch.py b/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light_node_container.launch.py index 9603570b3cfe7..1efd6f8cb3cfd 100644 --- a/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light_node_container.launch.py +++ b/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light_node_container.launch.py @@ -97,7 +97,7 @@ def create_parameter_dict(*args): plugin="autoware::traffic_light::TrafficLightClassifierNodelet", name="car_traffic_light_classifier", namespace="classification", - parameters=[car_traffic_light_classifier_model_param], + parameters=[car_traffic_light_classifier_model_param, {"build_only": False}], remappings=[ ("~/input/image", camera_arguments["input/image"]), ("~/input/rois", camera_arguments["output/rois"]), @@ -112,7 +112,7 @@ def create_parameter_dict(*args): plugin="autoware::traffic_light::TrafficLightClassifierNodelet", name="pedestrian_traffic_light_classifier", namespace="classification", - parameters=[pedestrian_traffic_light_classifier_model_param], + parameters=[pedestrian_traffic_light_classifier_model_param, {"build_only": False}], remappings=[ ("~/input/image", camera_arguments["input/image"]), ("~/input/rois", camera_arguments["output/rois"]), @@ -179,7 +179,7 @@ def create_parameter_dict(*args): plugin="autoware::traffic_light::TrafficLightFineDetectorNode", name="traffic_light_fine_detector", namespace=f"{namespace}/detection", - parameters=[fine_detector_model_param], + parameters=[fine_detector_model_param, {"build_only": False}], remappings=[ ("~/input/image", camera_arguments["input/image"]), ("~/input/rois", "rough/rois"), @@ -227,16 +227,14 @@ def add_launch_arg(name: str, default_value=None, description=None): # traffic_light_classifier add_launch_arg( "car_classifier_param_path", - os.path.join( - classifier_share_dir, "config", "car_traffic_light_classifier_efficientNet.param.yaml" - ), + os.path.join(classifier_share_dir, "config", "car_traffic_light_classifier.param.yaml"), ) add_launch_arg( "pedestrian_classifier_param_path", os.path.join( classifier_share_dir, "config", - "pedestrian_traffic_light_classifier_efficientNet.param.yaml", + "pedestrian_traffic_light_classifier.param.yaml", ), ) diff --git 
a/map/autoware_map_height_fitter/CMakeLists.txt b/map/autoware_map_height_fitter/CMakeLists.txt index 5e26a25af3a1d..4678d7ecfa24d 100644 --- a/map/autoware_map_height_fitter/CMakeLists.txt +++ b/map/autoware_map_height_fitter/CMakeLists.txt @@ -2,8 +2,8 @@ cmake_minimum_required(VERSION 3.14) project(autoware_map_height_fitter) find_package(autoware_cmake REQUIRED) -find_package(PCL REQUIRED COMPONENTS common) autoware_package() +find_package(PCL REQUIRED COMPONENTS common) ament_auto_add_library(${PROJECT_NAME} SHARED src/map_height_fitter.cpp diff --git a/perception/autoware_lidar_centerpoint/package.xml b/perception/autoware_lidar_centerpoint/package.xml index a4eb5c6758f61..cfb3e278c4ca3 100644 --- a/perception/autoware_lidar_centerpoint/package.xml +++ b/perception/autoware_lidar_centerpoint/package.xml @@ -6,6 +6,7 @@ The autoware_lidar_centerpoint package Kenzo Lobos-Tsunekawa Koji Minoda + Amadeusz Szymko Apache License 2.0 ament_cmake_auto diff --git a/perception/autoware_radar_crossing_objects_noise_filter/README.md b/perception/autoware_radar_crossing_objects_noise_filter/README.md index 4039624217a5c..134aa703faa53 100644 --- a/perception/autoware_radar_crossing_objects_noise_filter/README.md +++ b/perception/autoware_radar_crossing_objects_noise_filter/README.md @@ -67,14 +67,4 @@ To filter the objects crossing to ego vehicle, this package filter the objects a ### Parameters -- `angle_threshold` (double) [rad] - - Default parameter is 1.0472. - -This parameter is the angle threshold to filter. It has condition that 0 < `angle_threshold` < pi / 2. If the crossing angle is larger than this parameter, it can be a candidate for noise object. In other words, if it is smaller than this parameter, it is a filtered object. -If this parameter is set smaller, more objects are considered noise. In detail, see algorithm chapter. - -- `velocity_threshold` (double) [m/s] - - Default parameter is 3.0. - -This parameter is the velocity threshold to filter. If velocity of an object is larger than this parameter, it can be a candidate for noise object. In other words, if velocity of an object is smaller than this parameter, it is a filtered object. -If this parameter is set smaller, more objects are considered noise. In detail, see algorithm chapter. +{{ json_to_markdown("perception/autoware_radar_crossing_objects_noise_filter/schema/radar_crossing_objects_noise_filter.schema.json") }} diff --git a/perception/autoware_radar_crossing_objects_noise_filter/schema/radar_crossing_objects_noise_filter.schema.json b/perception/autoware_radar_crossing_objects_noise_filter/schema/radar_crossing_objects_noise_filter.schema.json new file mode 100644 index 0000000000000..a83678e8d27a1 --- /dev/null +++ b/perception/autoware_radar_crossing_objects_noise_filter/schema/radar_crossing_objects_noise_filter.schema.json @@ -0,0 +1,41 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "autoware_radar_crossing_objects_noise_filter parameter", + "type": "object", + "definitions": { + "autoware_radar_crossing_objects_noise_filter": { + "type": "object", + "properties": { + "angle_threshold": { + "type": "number", + "description": "Angle threshold to filter noise objects. Must satisfy 0 < angle_threshold < pi/2. If the crossing angle is larger than this parameter, it can be a candidate for a noise object. If smaller, it is a filtered object. 
A smaller value filters more objects as noise.", + "default": 1.221, + "exclusiveMinimum": 0.0, + "exclusiveMaximum": 1.5708 + }, + "velocity_threshold": { + "type": "number", + "description": "Velocity threshold to filter noise objects. If the velocity of an object is larger than this parameter, it can be a candidate for a noise object. If smaller, it is a filtered object. A smaller value filters more objects as noise.", + "default": 1.5, + "minimum": 0.0 + } + }, + "required": ["angle_threshold", "velocity_threshold"], + "additionalProperties": false + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/autoware_radar_crossing_objects_noise_filter" + } + }, + "required": ["ros__parameters"], + "additionalProperties": false + } + }, + "required": ["/**"], + "additionalProperties": false +} diff --git a/perception/autoware_radar_object_clustering/README.md b/perception/autoware_radar_object_clustering/README.md index cf52c33eba274..a5a5c4ee68e5a 100644 --- a/perception/autoware_radar_object_clustering/README.md +++ b/perception/autoware_radar_object_clustering/README.md @@ -65,61 +65,4 @@ So `is_fixed_size` parameter is recommended to set `true`, and size parameters i ### Parameter -- `angle_threshold` (double) [rad] - - Default parameter is 0.174. -- `distance_threshold` (double) [m] - - Default parameter is 4.0. -- `velocity_threshold` (double) [m/s] - - Default parameter is 2.0. - -These parameter are thresholds for angle, distance, and velocity to judge whether radar detections come from one object in "clustering" processing, which is written in detail at algorithm section. -If all of the difference in angle/distance/velocity from two objects is less than the thresholds, then the two objects are merged to one clustered object. -If these parameter is larger, more objects are merged to one clustered object. - -These are used in `isSameObject` function as below. - -```cpp - -bool RadarObjectClusteringNode::isSameObject( - const DetectedObject & object_1, const DetectedObject & object_2) -{ - const double angle_diff = std::abs(autoware::universe_utils::normalizeRadian( - tf2::getYaw(object_1.kinematics.pose_with_covariance.pose.orientation) - - tf2::getYaw(object_2.kinematics.pose_with_covariance.pose.orientation))); - const double velocity_diff = std::abs( - object_1.kinematics.twist_with_covariance.twist.linear.x - - object_2.kinematics.twist_with_covariance.twist.linear.x); - const double distance = autoware::universe_utils::calcDistance2d( - object_1.kinematics.pose_with_covariance.pose.position, - object_2.kinematics.pose_with_covariance.pose.position); - - if ( - distance < node_param_.distance_threshold && angle_diff < node_param_.angle_threshold && - velocity_diff < node_param_.velocity_threshold) { - return true; - } else { - return false; - } -} -``` - -- `is_fixed_label` (bool) - - Default parameter is false. -- `fixed_label` (string) - - Default parameter is "UNKNOWN". - -`is_fixed_label` is the flag to use fixed label. -If it is true, the label of a clustered object is overwritten by the label set by `fixed_label` parameter. -If the radar objects do not have label information, then it is recommended to use fixed label. - -- `is_fixed_size` (bool) - - Default parameter is false. -- `size_x` (double) [m] - - Default parameter is 4.0. -- `size_y` (double) [m] - - Default parameter is 1.5. -- `size_z` (double) [m] - - Default parameter is 1.5. - -`is_fixed_size` is the flag to use fixed size parameters. 
-If it is true, the size of a clustered object is overwritten by the label set by `size_x`, `size_y`, and `size_z` parameters. +{{ json_to_markdown("perception/autoware_radar_object_clustering/schema/radar_object_clustering.schema.json") }} diff --git a/perception/autoware_radar_object_clustering/config/radar_object_clustering.param.yaml b/perception/autoware_radar_object_clustering/config/radar_object_clustering.param.yaml index 2abbf14622a3f..a7b2392f89ba5 100644 --- a/perception/autoware_radar_object_clustering/config/radar_object_clustering.param.yaml +++ b/perception/autoware_radar_object_clustering/config/radar_object_clustering.param.yaml @@ -2,14 +2,14 @@ ros__parameters: # clustering parameter angle_threshold: 0.174 # [rad] (10 deg) - distance_threshold: 10.0 # [m] - velocity_threshold: 4.0 # [m/s] + distance_threshold: 4.0 # [m] + velocity_threshold: 2.0 # [m/s] # output object settings # set false if you want to use the object information from radar - is_fixed_label: true - fixed_label: "CAR" - is_fixed_size: true + is_fixed_label: false + fixed_label: "UNKNOWN" + is_fixed_size: false size_x: 4.0 # [m] size_y: 1.5 # [m] size_z: 1.5 # [m] diff --git a/perception/autoware_radar_object_clustering/schema/radar_object_clustering.schema.json b/perception/autoware_radar_object_clustering/schema/radar_object_clustering.schema.json new file mode 100644 index 0000000000000..e4b6cb2bacb60 --- /dev/null +++ b/perception/autoware_radar_object_clustering/schema/radar_object_clustering.schema.json @@ -0,0 +1,83 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Radar Object Clustering Parameters", + "type": "object", + "definitions": { + "radar_object_clustering": { + "type": "object", + "properties": { + "angle_threshold": { + "type": "number", + "description": "Angle threshold [rad] to determine whether radar detections come from one object during clustering. Larger values merge more objects.", + "default": 0.174 + }, + "distance_threshold": { + "type": "number", + "description": "Distance threshold [m] to determine whether radar detections come from one object during clustering. Larger values merge more objects.", + "default": 4.0 + }, + "velocity_threshold": { + "type": "number", + "description": "Velocity threshold [m/s] to determine whether radar detections come from one object during clustering. Larger values merge more objects.", + "default": 2.0 + }, + "is_fixed_label": { + "type": "boolean", + "description": "Flag to use a fixed label. If true, the label of a clustered object is overwritten by the `fixed_label` parameter.", + "default": false + }, + "fixed_label": { + "type": "string", + "description": "The fixed label to use when `is_fixed_label` is true.", + "default": "UNKNOWN" + }, + "is_fixed_size": { + "type": "boolean", + "description": "Flag to use fixed size parameters. 
If true, the size of a clustered object is overwritten by the `size_x`, `size_y`, and `size_z` parameters.", + "default": false + }, + "size_x": { + "type": "number", + "description": "Size of the clustered object in the x-dimension, used if `is_fixed_size` is true.", + "default": 4.0 + }, + "size_y": { + "type": "number", + "description": "Size of the clustered object in the y-dimension, used if `is_fixed_size` is true.", + "default": 1.5 + }, + "size_z": { + "type": "number", + "description": "Size of the clustered object in the z-dimension, used if `is_fixed_size` is true.", + "default": 1.5 + } + }, + "required": [ + "angle_threshold", + "distance_threshold", + "velocity_threshold", + "is_fixed_label", + "fixed_label", + "is_fixed_size", + "size_x", + "size_y", + "size_z" + ], + "additionalProperties": false + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/radar_object_clustering" + } + }, + "required": ["ros__parameters"], + "additionalProperties": false + } + }, + "required": ["/**"], + "additionalProperties": false +} diff --git a/perception/autoware_traffic_light_classifier/README.md b/perception/autoware_traffic_light_classifier/README.md index 7dcd4a73380bb..aedeac1230a4c 100644 --- a/perception/autoware_traffic_light_classifier/README.md +++ b/perception/autoware_traffic_light_classifier/README.md @@ -63,25 +63,19 @@ These colors and shapes are assigned to the message as follows: ### Node Parameters -| Name | Type | Description | -| ----------------------------- | ------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `classifier_type` | int | If the value is `1`, cnn_classifier is used | -| `data_path` | str | Packages data and artifacts directory path | -| `backlight_threshold` | double | If the intensity of light is grater than this threshold, the color and shape of the corresponding ROI will be overwritten with UNKNOWN, and the confidence of the overwritten signal will be set to `0.0`. The value should be set in the range of `[0.0, 1.0]`. If you wouldn't like to use this feature, please set it to `1.0`. | -| `classify_traffic_light_type` | int | If the value is `0`, vehicular signals are classified. If the value is `1`, pedestrian signals are classified. 
| +#### car_traffic_light_classifier + +{{ json_to_markdown("perception/autoware_traffic_light_classifier/schema/car_traffic_light_classifier.schema.json") }} + +#### pedestrian_traffic_light_classifier + +{{ json_to_markdown("perception/autoware_traffic_light_classifier/schema/pedestrian_traffic_light_classifier.schema.json") }} ### Core Parameters #### cnn_classifier -| Name | Type | Description | -| ----------------------- | --------------- | ------------------------------------ | -| `classifier_label_path` | str | path to the model file | -| `classifier_model_path` | str | path to the label file | -| `classifier_precision` | str | TensorRT precision, `fp16` or `int8` | -| `classifier_mean` | vector\ | 3-channel input image mean | -| `classifier_std` | vector\ | 3-channel input image std | -| `apply_softmax` | bool | whether or not apply softmax | +See the [car_traffic_light_classifier](#car_traffic_light_classifier) section above. #### hsv_classifier diff --git a/perception/autoware_traffic_light_classifier/config/car_traffic_light_classifier.param.yaml b/perception/autoware_traffic_light_classifier/config/car_traffic_light_classifier.param.yaml index 78490a6f8ee0d..41edea094b817 100644 --- a/perception/autoware_traffic_light_classifier/config/car_traffic_light_classifier.param.yaml +++ b/perception/autoware_traffic_light_classifier/config/car_traffic_light_classifier.param.yaml @@ -7,5 +7,5 @@ classifier_mean: [123.675, 116.28, 103.53] classifier_std: [58.395, 57.12, 57.375] backlight_threshold: 0.85 - classifier_type: 1 #classifier_type {hsv_filter: 0, cnn: 1} - classify_traffic_light_type: 0 #classify_traffic_light_type {car: 0, pedestrian:1} + classifier_type: 1 + classify_traffic_light_type: 0 diff --git a/perception/autoware_traffic_light_classifier/config/pedestrian_traffic_light_classifier.param.yaml b/perception/autoware_traffic_light_classifier/config/pedestrian_traffic_light_classifier.param.yaml index 9974ef3c55b73..4c73babfe5418 100644 --- a/perception/autoware_traffic_light_classifier/config/pedestrian_traffic_light_classifier.param.yaml +++ b/perception/autoware_traffic_light_classifier/config/pedestrian_traffic_light_classifier.param.yaml @@ -7,5 +7,5 @@ classifier_mean: [123.675, 116.28, 103.53] classifier_std: [58.395, 57.12, 57.375] backlight_threshold: 0.85 - classifier_type: 1 #classifier_type {hsv_filter: 0, cnn: 1} - classify_traffic_light_type: 1 #classify_traffic_light_type {car: 0, pedestrian:1} + classifier_type: 1 + classify_traffic_light_type: 1 diff --git a/perception/autoware_traffic_light_classifier/launch/traffic_light_classifier.launch.xml b/perception/autoware_traffic_light_classifier/launch/car_traffic_light_classifier.launch.xml similarity index 55% rename from perception/autoware_traffic_light_classifier/launch/traffic_light_classifier.launch.xml rename to perception/autoware_traffic_light_classifier/launch/car_traffic_light_classifier.launch.xml index d0cbbd3dcae9b..41a6924bf22ba 100644 --- a/perception/autoware_traffic_light_classifier/launch/traffic_light_classifier.launch.xml +++ b/perception/autoware_traffic_light_classifier/launch/car_traffic_light_classifier.launch.xml @@ -2,7 +2,11 @@ - + + + + + diff --git a/perception/autoware_traffic_light_classifier/launch/pedestrian_traffic_light_classifier.launch.xml b/perception/autoware_traffic_light_classifier/launch/pedestrian_traffic_light_classifier.launch.xml new file mode 100644 index 0000000000000..18569aeffdd1c --- /dev/null +++ 
b/perception/autoware_traffic_light_classifier/launch/pedestrian_traffic_light_classifier.launch.xml @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + diff --git a/perception/autoware_traffic_light_classifier/schema/car_traffic_light_classifier.schema.json b/perception/autoware_traffic_light_classifier/schema/car_traffic_light_classifier.schema.json new file mode 100644 index 0000000000000..62504836f6af3 --- /dev/null +++ b/perception/autoware_traffic_light_classifier/schema/car_traffic_light_classifier.schema.json @@ -0,0 +1,89 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "autoware_traffic_light_classifier parameter", + "type": "object", + "definitions": { + "car_traffic_light_classifier": { + "type": "object", + "properties": { + "approximate_sync": { + "type": "boolean", + "description": "Enable or disable approximate synchronization.", + "default": false + }, + "classifier_label_path": { + "type": "string", + "description": "Path to the label file for the traffic light classifier.", + "default": "$(var traffic_light_classifier_model_path)/$(var car_traffic_light_classifier_label_name)" + }, + "classifier_model_path": { + "type": "string", + "description": "Path to the ONNX model file for the traffic light classifier.", + "default": "$(var traffic_light_classifier_model_path)/$(var car_traffic_light_classifier_model_name).onnx" + }, + "classifier_precision": { + "type": "string", + "description": "Precision used for traffic light classifier inference. Valid values: [fp32, fp16, int8].", + "default": "fp16" + }, + "classifier_mean": { + "type": "array", + "description": "Mean values used for input normalization [R, G, B].", + "items": { + "type": "number" + }, + "default": [123.675, 116.28, 103.53] + }, + "classifier_std": { + "type": "array", + "description": "Standard deviation values used for input normalization [R, G, B].", + "items": { + "type": "number" + }, + "default": [58.395, 57.12, 57.375] + }, + "backlight_threshold": { + "type": "number", + "description": "If the intensity of light is greater than this threshold, the color and shape of the corresponding ROI are overwritten with UNKNOWN, and the confidence of the overwritten signal is set to `0.0`. The value must be within `[0.0, 1.0]`; higher values overwrite only in harsher backlight situations. Set it to `1.0` to disable this feature.", + "default": 0.85 + }, + "classifier_type": { + "type": "integer", + "description": "Type of classifier used. {0: hsv_filter, 1: cnn}.", + "default": 1 + }, + "classify_traffic_light_type": { + "type": "integer", + "description": "Type of traffic light to classify. 
{0: car, 1: pedestrian}.", + "default": 0 + } + }, + "required": [ + "approximate_sync", + "classifier_label_path", + "classifier_model_path", + "classifier_precision", + "classifier_mean", + "classifier_std", + "backlight_threshold", + "classifier_type", + "classify_traffic_light_type" + ], + "additionalProperties": false + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/car_traffic_light_classifier" + } + }, + "required": ["ros__parameters"], + "additionalProperties": false + } + }, + "required": ["/**"], + "additionalProperties": false +} diff --git a/perception/autoware_traffic_light_classifier/schema/pedestrian_traffic_light_classifier.schema.json b/perception/autoware_traffic_light_classifier/schema/pedestrian_traffic_light_classifier.schema.json new file mode 100644 index 0000000000000..d033afe77e7f7 --- /dev/null +++ b/perception/autoware_traffic_light_classifier/schema/pedestrian_traffic_light_classifier.schema.json @@ -0,0 +1,89 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "autoware_traffic_light_classifier parameter", + "type": "object", + "definitions": { + "pedestrian_traffic_light_classifier": { + "type": "object", + "properties": { + "approximate_sync": { + "type": "boolean", + "description": "Enable or disable approximate synchronization.", + "default": false + }, + "classifier_label_path": { + "type": "string", + "description": "Path to the label file for the traffic light classifier.", + "default": "$(var traffic_light_classifier_model_path)/$(var pedestrian_traffic_light_classifier_label_name)" + }, + "classifier_model_path": { + "type": "string", + "description": "Path to the ONNX model file for the traffic light classifier.", + "default": "$(var traffic_light_classifier_model_path)/$(var pedestrian_traffic_light_classifier_model_name).onnx" + }, + "classifier_precision": { + "type": "string", + "description": "Precision used for traffic light classifier inference. Valid values: [fp32, fp16, int8].", + "default": "fp16" + }, + "classifier_mean": { + "type": "array", + "description": "Mean values used for input normalization [R, G, B].", + "items": { + "type": "number" + }, + "default": [123.675, 116.28, 103.53] + }, + "classifier_std": { + "type": "array", + "description": "Standard deviation values used for input normalization [R, G, B].", + "items": { + "type": "number" + }, + "default": [58.395, 57.12, 57.375] + }, + "backlight_threshold": { + "type": "number", + "description": "If the intensity get grater than this overwrite with UNKNOWN in corresponding RoI. Note that, if the value is much higher, the node only overwrites in the harsher backlight situations. Therefore, If you wouldn't like to use this feature set this value to `1.0`. The value can be `[0.0, 1.0]`. The confidence of overwritten signal is set to `0.0`.", + "default": 0.85 + }, + "classifier_type": { + "type": "integer", + "description": "Type of classifier used. {0: hsv_filter, 1: cnn}.", + "default": 1 + }, + "classify_traffic_light_type": { + "type": "integer", + "description": "Type of traffic light to classify. 
{0: car, 1: pedestrian}.", + "default": 1 + } + }, + "required": [ + "approximate_sync", + "classifier_label_path", + "classifier_model_path", + "classifier_precision", + "classifier_mean", + "classifier_std", + "backlight_threshold", + "classifier_type", + "classify_traffic_light_type" + ], + "additionalProperties": false + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/pedestrian_traffic_light_classifier" + } + }, + "required": ["ros__parameters"], + "additionalProperties": false + } + }, + "required": ["/**"], + "additionalProperties": false +} diff --git a/perception/autoware_traffic_light_classifier/src/classifier/cnn_classifier.cpp b/perception/autoware_traffic_light_classifier/src/classifier/cnn_classifier.cpp index d47cb1500fffd..8dd2e2e51f2fb 100644 --- a/perception/autoware_traffic_light_classifier/src/classifier/cnn_classifier.cpp +++ b/perception/autoware_traffic_light_classifier/src/classifier/cnn_classifier.cpp @@ -34,15 +34,13 @@ CNNClassifier::CNNClassifier(rclcpp::Node * node_ptr) : node_ptr_(node_ptr) std::string precision; std::string label_file_path; std::string model_file_path; - precision = node_ptr_->declare_parameter("classifier_precision", "fp16"); - label_file_path = node_ptr_->declare_parameter("classifier_label_path", "labels.txt"); - model_file_path = node_ptr_->declare_parameter("classifier_model_path", "model.onnx"); + precision = node_ptr_->declare_parameter("classifier_precision"); + label_file_path = node_ptr_->declare_parameter("classifier_label_path"); + model_file_path = node_ptr_->declare_parameter("classifier_model_path"); // ros param does not support loading std::vector // we have to load std::vector and transfer to std::vector - auto mean_d = - node_ptr->declare_parameter("classifier_mean", std::vector{123.675, 116.28, 103.53}); - auto std_d = - node_ptr->declare_parameter("classifier_std", std::vector{58.395, 57.12, 57.375}); + auto mean_d = node_ptr->declare_parameter>("classifier_mean"); + auto std_d = node_ptr->declare_parameter>("classifier_std"); mean_ = std::vector(mean_d.begin(), mean_d.end()); std_ = std::vector(std_d.begin(), std_d.end()); if (mean_.size() != 3 || std_.size() != 3) { @@ -55,7 +53,7 @@ CNNClassifier::CNNClassifier(rclcpp::Node * node_ptr) : node_ptr_(node_ptr) classifier_ = std::make_unique( model_file_path, precision, mean_, std_); batch_size_ = classifier_->getBatchSize(); - if (node_ptr_->declare_parameter("build_only", false)) { + if (node_ptr_->declare_parameter("build_only")) { RCLCPP_INFO(node_ptr_->get_logger(), "TensorRT engine is built and shutdown node."); rclcpp::shutdown(); } diff --git a/perception/autoware_traffic_light_classifier/src/traffic_light_classifier_node.cpp b/perception/autoware_traffic_light_classifier/src/traffic_light_classifier_node.cpp index 796a144bf8266..4b03d083a271a 100644 --- a/perception/autoware_traffic_light_classifier/src/traffic_light_classifier_node.cpp +++ b/perception/autoware_traffic_light_classifier/src/traffic_light_classifier_node.cpp @@ -25,11 +25,11 @@ namespace autoware::traffic_light TrafficLightClassifierNodelet::TrafficLightClassifierNodelet(const rclcpp::NodeOptions & options) : Node("traffic_light_classifier_node", options) { - classify_traffic_light_type_ = this->declare_parameter("classify_traffic_light_type", 0); + classify_traffic_light_type_ = this->declare_parameter("classify_traffic_light_type"); using std::placeholders::_1; using std::placeholders::_2; - is_approximate_sync_ = 
this->declare_parameter("approximate_sync", false); + is_approximate_sync_ = this->declare_parameter("approximate_sync"); backlight_threshold_ = this->declare_parameter("backlight_threshold"); if (is_approximate_sync_) { @@ -49,8 +49,7 @@ TrafficLightClassifierNodelet::TrafficLightClassifierNodelet(const rclcpp::NodeO timer_ = rclcpp::create_timer( this, get_clock(), 100ms, std::bind(&TrafficLightClassifierNodelet::connectCb, this)); - int classifier_type = this->declare_parameter( - "classifier_type", static_cast(TrafficLightClassifierNodelet::ClassifierType::HSVFilter)); + int classifier_type = this->declare_parameter("classifier_type"); if (classifier_type == TrafficLightClassifierNodelet::ClassifierType::HSVFilter) { classifier_ptr_ = std::make_shared(this); } else if (classifier_type == TrafficLightClassifierNodelet::ClassifierType::CNN) { diff --git a/perception/autoware_traffic_light_multi_camera_fusion/README.md b/perception/autoware_traffic_light_multi_camera_fusion/README.md index b54c623f5d750..ff260546fd0cb 100644 --- a/perception/autoware_traffic_light_multi_camera_fusion/README.md +++ b/perception/autoware_traffic_light_multi_camera_fusion/README.md @@ -34,9 +34,4 @@ You don't need to configure these topics manually. Just provide the `camera_name ## Node parameters -| Parameter | Type | Description | -| ---------------------- | --------------- | ------------------------------------------------ | -| `camera_namespaces` | vector\ | Camera Namespaces to be fused | -| `message_lifespan` | double | The maximum timestamp span to be fused | -| `approximate_sync` | bool | Whether work in Approximate Synchronization Mode | -| `perform_group_fusion` | bool | Whether perform Group Fusion | +{{ json_to_markdown("perception/autoware_traffic_light_multi_camera_fusion/schema/traffic_light_multi_camera_fusion.schema.json") }} diff --git a/perception/autoware_traffic_light_multi_camera_fusion/config/traffic_light_multi_camera_fusion.param.yaml b/perception/autoware_traffic_light_multi_camera_fusion/config/traffic_light_multi_camera_fusion.param.yaml index 4e9459ce4a71c..62f8d47bf7525 100644 --- a/perception/autoware_traffic_light_multi_camera_fusion/config/traffic_light_multi_camera_fusion.param.yaml +++ b/perception/autoware_traffic_light_multi_camera_fusion/config/traffic_light_multi_camera_fusion.param.yaml @@ -3,4 +3,3 @@ camera_namespaces: ["camera6", "camera7"] message_lifespan: 0.09 approximate_sync: false - perform_group_fusion: true diff --git a/perception/autoware_traffic_light_multi_camera_fusion/schema/traffic_light_multi_camera_fusion.schema.json b/perception/autoware_traffic_light_multi_camera_fusion/schema/traffic_light_multi_camera_fusion.schema.json new file mode 100644 index 0000000000000..bdd6dca516fa5 --- /dev/null +++ b/perception/autoware_traffic_light_multi_camera_fusion/schema/traffic_light_multi_camera_fusion.schema.json @@ -0,0 +1,46 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "autoware_traffic_light_multi_camera_fusion parameter", + "type": "object", + "definitions": { + "autoware_traffic_light_multi_camera_fusion": { + "type": "object", + "properties": { + "camera_namespaces": { + "type": "array", + "description": "Camera namespaces to be fused.", + "items": { + "type": "string" + }, + "default": [] + }, + "message_lifespan": { + "type": "number", + "description": "The maximum timestamp span to be fused.", + "default": 0.0 + }, + "approximate_sync": { + "type": "boolean", + "description": "Whether to work in Approximate Synchronization 
Mode.", + "default": false + } + }, + "required": ["camera_namespaces", "message_lifespan", "approximate_sync"], + "additionalProperties": false + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/autoware_traffic_light_multi_camera_fusion" + } + }, + "required": ["ros__parameters"], + "additionalProperties": false + } + }, + "required": ["/**"], + "additionalProperties": false +} diff --git a/perception/autoware_traffic_light_multi_camera_fusion/src/traffic_light_multi_camera_fusion_node.cpp b/perception/autoware_traffic_light_multi_camera_fusion/src/traffic_light_multi_camera_fusion_node.cpp index 15211920bc7f2..8655e3fabe334 100644 --- a/perception/autoware_traffic_light_multi_camera_fusion/src/traffic_light_multi_camera_fusion_node.cpp +++ b/perception/autoware_traffic_light_multi_camera_fusion/src/traffic_light_multi_camera_fusion_node.cpp @@ -148,9 +148,9 @@ MultiCameraFusion::MultiCameraFusion(const rclcpp::NodeOptions & node_options) using std::placeholders::_3; std::vector camera_namespaces = - this->declare_parameter("camera_namespaces", std::vector{}); - is_approximate_sync_ = this->declare_parameter("approximate_sync", false); - message_lifespan_ = this->declare_parameter("message_lifespan", 0.09); + this->declare_parameter>("camera_namespaces"); + is_approximate_sync_ = this->declare_parameter("approximate_sync"); + message_lifespan_ = this->declare_parameter("message_lifespan"); for (const std::string & camera_ns : camera_namespaces) { std::string signal_topic = camera_ns + "/classification/traffic_signals"; std::string roi_topic = camera_ns + "/detection/rois"; diff --git a/planning/behavior_path_planner/autoware_behavior_path_goal_planner_module/include/autoware/behavior_path_goal_planner_module/goal_planner_module.hpp b/planning/behavior_path_planner/autoware_behavior_path_goal_planner_module/include/autoware/behavior_path_goal_planner_module/goal_planner_module.hpp index 7b5539fc7e5f8..5514c2d5d1fd1 100644 --- a/planning/behavior_path_planner/autoware_behavior_path_goal_planner_module/include/autoware/behavior_path_goal_planner_module/goal_planner_module.hpp +++ b/planning/behavior_path_planner/autoware_behavior_path_goal_planner_module/include/autoware/behavior_path_goal_planner_module/goal_planner_module.hpp @@ -73,14 +73,6 @@ struct GoalPlannerDebugData utils::path_safety_checker::CollisionCheckDebugMap collision_check{}; }; -struct LastApprovalData -{ - LastApprovalData(rclcpp::Time time, Pose pose) : time(time), pose(pose) {} - - rclcpp::Time time{}; - Pose pose{}; -}; - struct PullOverContextData { PullOverContextData() = delete; @@ -352,7 +344,7 @@ class GoalPlannerModule : public SceneModuleInterface std::optional context_data_{std::nullopt}; // path_decision_controller is updated in updateData(), and used in plan() PathDecisionStateController path_decision_controller_{getLogger()}; - std::unique_ptr last_approval_data_{nullptr}; + std::optional decided_time_{}; // approximate distance from the start point to the end point of pull_over. // this is used as an assumed value to decelerate, etc., before generating the actual path. 
diff --git a/planning/behavior_path_planner/autoware_behavior_path_goal_planner_module/src/goal_planner_module.cpp b/planning/behavior_path_planner/autoware_behavior_path_goal_planner_module/src/goal_planner_module.cpp index 43656a43888ae..d5615a76e14b0 100644 --- a/planning/behavior_path_planner/autoware_behavior_path_goal_planner_module/src/goal_planner_module.cpp +++ b/planning/behavior_path_planner/autoware_behavior_path_goal_planner_module/src/goal_planner_module.cpp @@ -778,30 +778,38 @@ void GoalPlannerModule::updateData() pull_over_path_recv, clock_->now(), static_target_objects, dynamic_target_objects, planner_data_, occupancy_grid_map_, is_current_safe, parameters_, goal_searcher, debug_data_.ego_polygons_expanded); + const auto new_decision_state = path_decision_controller_.get_current_state(); auto [lane_parking_response, freespace_parking_response] = syncWithThreads(); + + // NOTE: currently occupancy_grid_map_ must be used after syncWithThreads + goal_searcher.update(goal_candidates_, occupancy_grid_map_, planner_data_, static_target_objects); + if (context_data_) { context_data_.value().update( - path_decision_controller_.get_current_state().is_stable_safe, static_target_objects, - dynamic_target_objects, prev_decision_state, + new_decision_state.is_stable_safe, static_target_objects, dynamic_target_objects, + prev_decision_state, isStopped( odometry_buffer_stopped_, planner_data_->self_odometry, parameters_.th_stopped_time, parameters_.th_stopped_velocity), std::move(lane_parking_response), std::move(freespace_parking_response)); } else { context_data_.emplace( - path_decision_controller_.get_current_state().is_stable_safe, static_target_objects, - dynamic_target_objects, prev_decision_state, + new_decision_state.is_stable_safe, static_target_objects, dynamic_target_objects, + prev_decision_state, isStopped( odometry_buffer_stopped_, planner_data_->self_odometry, parameters_.th_stopped_time, parameters_.th_stopped_velocity), std::move(lane_parking_response), std::move(freespace_parking_response)); } - const auto & ctx_data = context_data_.value(); - goal_searcher.update( - goal_candidates_, occupancy_grid_map_, planner_data_, ctx_data.static_target_objects); auto & ctx_data_mut = context_data_.value(); + if (!decided_time_ && new_decision_state.state == PathDecisionState::DecisionKind::DECIDED) { + decided_time_ = clock_->now(); + // TODO(soblin): do not "plan" in updateData + if (ctx_data_mut.pull_over_path_opt) decideVelocity(ctx_data_mut.pull_over_path_opt.value()); + } + if (!isActivated()) { return; } @@ -814,13 +822,6 @@ void GoalPlannerModule::updateData() } } } - - if (!last_approval_data_) { - last_approval_data_ = - std::make_unique<LastApprovalData>(clock_->now(), planner_data_->self_odometry->pose.pose); - // TODO(soblin): do not "plan" in updateData - if (ctx_data_mut.pull_over_path_opt) decideVelocity(ctx_data_mut.pull_over_path_opt.value()); - } } void GoalPlannerModule::processOnExit() { resetPathReference(); debug_marker_.markers.clear(); context_data_ = std::nullopt; - last_approval_data_.reset(); } bool GoalPlannerModule::isExecutionRequested() const @@ -1850,7 +1850,7 @@ bool GoalPlannerModule::hasFinishedCurrentPath(const PullOverContextData & ctx_d { universe_utils::ScopedTimeTrack st(__func__, *time_keeper_); - if (!last_approval_data_) { + if (!decided_time_) { return false; } @@ -1860,10 +1860,10 @@ bool GoalPlannerModule::hasFinishedCurrentPath(const PullOverContextData & ctx_d // check if enough time has 
passed since last approval // this is necessary to give turn signal for enough time - const bool has_passed_enough_time_from_approval = - (clock_->now() - last_approval_data_->time).seconds() > + const bool has_passed_enough_time_from_decided = + (clock_->now() - decided_time_.value()).seconds() > planner_data_->parameters.turn_signal_search_time; - if (!has_passed_enough_time_from_approval) { + if (!has_passed_enough_time_from_decided) { return false; } diff --git a/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/README.md b/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/README.md index e85a171495e38..15e707ee32c62 100644 --- a/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/README.md +++ b/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/README.md @@ -29,9 +29,9 @@ This module is activated when there is traffic light in ego lane. 3. When vehicle current velocity is - - higher than 2.0m/s ⇒ pass judge(using next slide formula) + - higher than `yellow_light_stop_velocity` m/s ⇒ pass judge (using the formula described below) - - lower than 2.0m/s ⇒ stop + - lower than `yellow_light_stop_velocity` m/s ⇒ stop 4. When it to be judged that vehicle can’t stop before stop line, autoware chooses one of the following behaviors @@ -69,13 +69,14 @@ This module is activated when there is traffic light in ego lane. #### Module Parameters -| Parameter | Type | Description | -| ---------------------- | ------ | -------------------------------------------------------------------- | -| `stop_margin` | double | [m] margin before stop point | -| `tl_state_timeout` | double | [s] time out for detected traffic light result. | -| `stop_time_hysteresis` | double | [s] time threshold to decide stop planning for chattering prevention | -| `yellow_lamp_period` | double | [s] time for yellow lamp | -| `enable_pass_judge` | bool | [-] whether to use pass judge | +| Parameter | Type | Description | +| ---------------------------- | ------ | -------------------------------------------------------------------- | +| `stop_margin` | double | [m] margin before stop point | +| `tl_state_timeout` | double | [s] time out for detected traffic light result. | +| `stop_time_hysteresis` | double | [s] time threshold to decide stop planning for chattering prevention | +| `yellow_lamp_period` | double | [s] time for yellow lamp | +| `yellow_light_stop_velocity` | double | [m/s] velocity threshold for always stopping at a yellow light. | +| `enable_pass_judge` | bool | [-] whether to use pass judge | #### Flowchart diff --git a/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/config/traffic_light.param.yaml b/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/config/traffic_light.param.yaml index 23746a61b6043..253fb69043f7c 100644 --- a/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/config/traffic_light.param.yaml +++ b/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/config/traffic_light.param.yaml @@ -5,5 +5,6 @@ tl_state_timeout: 1.0 stop_time_hysteresis: 0.1 yellow_lamp_period: 2.75 + yellow_light_stop_velocity: 1.0 # Velocity threshold (m/s) below which the vehicle will always stop before the traffic light when the signal turns yellow, regardless of the pass_judge decision. 
enable_pass_judge: true enable_rtc: false # If set to true, the scene modules require approval from the rtc (request to cooperate) function. If set to false, the modules can be executed without requiring rtc approval diff --git a/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/src/manager.cpp b/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/src/manager.cpp index 1d1ee7fc996b2..dc008083bb0dd 100644 --- a/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/src/manager.cpp +++ b/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/src/manager.cpp @@ -42,6 +42,8 @@ TrafficLightModuleManager::TrafficLightModuleManager(rclcpp::Node & node) planner_param_.enable_pass_judge = getOrDeclareParameter<bool>(node, ns + ".enable_pass_judge"); planner_param_.yellow_lamp_period = getOrDeclareParameter<double>(node, ns + ".yellow_lamp_period"); + planner_param_.yellow_light_stop_velocity = + getOrDeclareParameter<double>(node, ns + ".yellow_light_stop_velocity"); pub_tl_state_ = node.create_publisher( "~/output/traffic_signal", 1); } diff --git a/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/src/scene.cpp b/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/src/scene.cpp index 3a8692e9c53dd..f46edd6c52f81 100644 --- a/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/src/scene.cpp +++ b/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/src/scene.cpp @@ -213,7 +213,8 @@ bool TrafficLightModule::isPassthrough(const double & signed_arc_length) const delay_response_time); const bool distance_stoppable = pass_judge_line_distance < signed_arc_length; - const bool slow_velocity = planner_data_->current_velocity->twist.linear.x < 2.0; + const bool slow_velocity = + planner_data_->current_velocity->twist.linear.x < planner_param_.yellow_light_stop_velocity; const bool stoppable = distance_stoppable || slow_velocity; const bool reachable = signed_arc_length < reachable_distance; diff --git a/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/src/scene.hpp b/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/src/scene.hpp index 71f0855a4af6f..12e1ba2537f92 100644 --- a/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/src/scene.hpp +++ b/planning/behavior_velocity_planner/autoware_behavior_velocity_traffic_light_module/src/scene.hpp @@ -61,6 +61,7 @@ class TrafficLightModule : public SceneModuleInterfaceWithRTC double stop_margin; double tl_state_timeout; double yellow_lamp_period; + double yellow_light_stop_velocity; double stop_time_hysteresis; bool enable_pass_judge; }; diff --git a/sensing/autoware_radar_tracks_noise_filter/README.md b/sensing/autoware_radar_tracks_noise_filter/README.md index 25e8d2f96e7f7..47f522a86bbcc 100644 --- a/sensing/autoware_radar_tracks_noise_filter/README.md +++ b/sensing/autoware_radar_tracks_noise_filter/README.md @@ -29,6 +29,4 @@ In y-axis threshold filter, if y-axis velocity of RadarTrack is more than `velocity_y_threshold`, it treats as noise objects. ## Parameters -| Name | Type | Description | Default value | -| :--------------------- | :----- | :--------------------------------------------------------------------------------------------------------------------------------- | :------------ | -| `velocity_y_threshold` | double | Y-axis velocity threshold [m/s]. 
If y-axis velocity of RadarTrack is more than `velocity_y_threshold`, it treats as noise objects. | 7.0 | +{{ json_to_markdown("sensing/autoware_radar_tracks_noise_filter/schema/radar_tracks_noise_filter.schema.json") }} diff --git a/system/autoware_diagnostic_graph_aggregator/doc/format/graph.md b/system/autoware_diagnostic_graph_aggregator/doc/format/graph.md --- a/system/autoware_diagnostic_graph_aggregator/doc/format/graph.md +++ b/system/autoware_diagnostic_graph_aggregator/doc/format/graph.md -| Name | Type | Required | Description | -| ------- | -------------------------------------- | -------- | -------------------------------------------------- | -| `files` | list\[[path](./path.md)\] | no | List of path objects for importing subgraphs. | -| `units` | list\[[unit](./unit.md)\] | no | List of unit objects that make up the graph. | -| `edits` | list\[[edit](./edit.md)\] | no | List of edit objects to partially edit the graph. | +| Name | Type | Required | Description | +| ------- | -------------------------------------- | -------- | --------------------------------------------- | +| `files` | list\[[path](./path.md)\] | no | List of path objects for importing subgraphs. | +| `units` | list\[[unit](./unit.md)\] | no | List of unit objects that make up the graph. | diff --git a/system/autoware_diagnostic_graph_aggregator/example/example-edit.launch.xml b/system/autoware_diagnostic_graph_aggregator/example/example-edit.launch.xml deleted file mode 100644 index 1a15bbe929f6e..0000000000000 --- a/system/autoware_diagnostic_graph_aggregator/example/example-edit.launch.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - diff --git a/system/autoware_diagnostic_graph_aggregator/example/graph/edit.yaml b/system/autoware_diagnostic_graph_aggregator/example/graph/edit.yaml deleted file mode 100644 index 2fbdbffca438d..0000000000000 --- a/system/autoware_diagnostic_graph_aggregator/example/graph/edit.yaml +++ /dev/null @@ -1,6 +0,0 @@ -files: - - { path: $(find-pkg-share diagnostic_graph_aggregator)/example/graph/main.yaml } - -edits: - - path: /functions/obstacle_detection - type: remove
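A pattern worth calling out across this diff: `declare_parameter("name", default)` calls become `declare_parameter<T>("name")`, removing the in-code fallback so the schema-validated `*.param.yaml` files are the single source of truth. The sketch below shows the rclcpp behavior this relies on; `ExampleNode` and the `main()` wiring are illustrative, not taken from any of the nodes above:

```cpp
#include <memory>

#include <rclcpp/rclcpp.hpp>

class ExampleNode : public rclcpp::Node
{
public:
  explicit ExampleNode(const rclcpp::NodeOptions & options) : Node("example_node", options)
  {
    // No default value: the parameter is mandatory. If the launch file or YAML
    // does not provide one, this throws
    // rclcpp::exceptions::ParameterUninitializedException, so a misconfigured
    // node fails fast at construction instead of running on a silent fallback.
    backlight_threshold_ = declare_parameter<double>("backlight_threshold");
  }

private:
  double backlight_threshold_{};
};

int main(int argc, char ** argv)
{
  rclcpp::init(argc, argv);
  rclcpp::NodeOptions options;
  // Mimics what a schema-backed *.param.yaml provides at launch time.
  options.parameter_overrides({{"backlight_threshold", 0.85}});
  rclcpp::spin(std::make_shared<ExampleNode>(options));
  rclcpp::shutdown();
  return 0;
}
```

This is also why the container launch file now passes an explicit `{"build_only": False}`: once `build_only` has no in-code default, every launch path must supply a value.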