diff --git a/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light_node_container.launch.py b/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light_node_container.launch.py
index 9603570b3cfe7..1efd6f8cb3cfd 100644
--- a/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light_node_container.launch.py
+++ b/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light_node_container.launch.py
@@ -97,7 +97,7 @@ def create_parameter_dict(*args):
                 plugin="autoware::traffic_light::TrafficLightClassifierNodelet",
                 name="car_traffic_light_classifier",
                 namespace="classification",
-                parameters=[car_traffic_light_classifier_model_param],
+                parameters=[car_traffic_light_classifier_model_param, {"build_only": False}],
                 remappings=[
                     ("~/input/image", camera_arguments["input/image"]),
                     ("~/input/rois", camera_arguments["output/rois"]),
@@ -112,7 +112,7 @@ def create_parameter_dict(*args):
                 plugin="autoware::traffic_light::TrafficLightClassifierNodelet",
                 name="pedestrian_traffic_light_classifier",
                 namespace="classification",
-                parameters=[pedestrian_traffic_light_classifier_model_param],
+                parameters=[pedestrian_traffic_light_classifier_model_param, {"build_only": False}],
                 remappings=[
                     ("~/input/image", camera_arguments["input/image"]),
                     ("~/input/rois", camera_arguments["output/rois"]),
@@ -179,7 +179,7 @@ def create_parameter_dict(*args):
         plugin="autoware::traffic_light::TrafficLightFineDetectorNode",
         name="traffic_light_fine_detector",
         namespace=f"{namespace}/detection",
-        parameters=[fine_detector_model_param],
+        parameters=[fine_detector_model_param, {"build_only": False}],
         remappings=[
             ("~/input/image", camera_arguments["input/image"]),
             ("~/input/rois", "rough/rois"),
@@ -227,16 +227,14 @@ def add_launch_arg(name: str, default_value=None, description=None):
     # traffic_light_classifier
     add_launch_arg(
         "car_classifier_param_path",
-        os.path.join(
-            classifier_share_dir, "config", "car_traffic_light_classifier_efficientNet.param.yaml"
-        ),
+        os.path.join(classifier_share_dir, "config", "car_traffic_light_classifier.param.yaml"),
     )
     add_launch_arg(
         "pedestrian_classifier_param_path",
         os.path.join(
             classifier_share_dir,
             "config",
-            "pedestrian_traffic_light_classifier_efficientNet.param.yaml",
+            "pedestrian_traffic_light_classifier.param.yaml",
         ),
     )

diff --git a/perception/autoware_traffic_light_classifier/README.md b/perception/autoware_traffic_light_classifier/README.md
index 7dcd4a73380bb..aedeac1230a4c 100644
--- a/perception/autoware_traffic_light_classifier/README.md
+++ b/perception/autoware_traffic_light_classifier/README.md
@@ -63,25 +63,19 @@ These colors and shapes are assigned to the message as follows:

 ### Node Parameters

-| Name                          | Type   | Description                                                                                                     |
-| ----------------------------- | ------ | --------------------------------------------------------------------------------------------------------------- |
-| `classifier_type`             | int    | If the value is `1`, cnn_classifier is used                                                                     |
-| `data_path`                   | str    | Packages data and artifacts directory path                                                                      |
-| `backlight_threshold`         | double | If the intensity of light is grater than this threshold, the color and shape of the corresponding ROI will be overwritten with UNKNOWN, and the confidence of the overwritten signal will be set to `0.0`. The value should be set in the range of `[0.0, 1.0]`. If you wouldn't like to use this feature, please set it to `1.0`. |
-| `classify_traffic_light_type` | int    | If the value is `0`, vehicular signals are classified. If the value is `1`, pedestrian signals are classified.  |
+#### car_traffic_light_classifier
+
+{{ json_to_markdown("perception/autoware_traffic_light_classifier/schema/car_traffic_light_classifier.schema.json") }}
+
+#### pedestrian_traffic_light_classifier
+
+{{ json_to_markdown("perception/autoware_traffic_light_classifier/schema/pedestrian_traffic_light_classifier.schema.json") }}

 ### Core Parameters

 #### cnn_classifier

-| Name                    | Type            | Description                          |
-| ----------------------- | --------------- | ------------------------------------ |
-| `classifier_label_path` | str             | path to the model file               |
-| `classifier_model_path` | str             | path to the label file               |
-| `classifier_precision`  | str             | TensorRT precision, `fp16` or `int8` |
-| `classifier_mean`       | vector\<double> | 3-channel input image mean           |
-| `classifier_std`        | vector\<double> | 3-channel input image std            |
-| `apply_softmax`         | bool            | whether or not apply softmax         |
+Including [this section](#car_traffic_light_classifier)

 #### hsv_classifier

diff --git a/perception/autoware_traffic_light_classifier/config/car_traffic_light_classifier.param.yaml b/perception/autoware_traffic_light_classifier/config/car_traffic_light_classifier.param.yaml
index 78490a6f8ee0d..41edea094b817 100644
--- a/perception/autoware_traffic_light_classifier/config/car_traffic_light_classifier.param.yaml
+++ b/perception/autoware_traffic_light_classifier/config/car_traffic_light_classifier.param.yaml
@@ -7,5 +7,5 @@
     classifier_mean: [123.675, 116.28, 103.53]
     classifier_std: [58.395, 57.12, 57.375]
     backlight_threshold: 0.85
-    classifier_type: 1 #classifier_type {hsv_filter: 0, cnn: 1}
-    classify_traffic_light_type: 0 #classify_traffic_light_type {car: 0, pedestrian:1}
+    classifier_type: 1
+    classify_traffic_light_type: 0
diff --git a/perception/autoware_traffic_light_classifier/config/pedestrian_traffic_light_classifier.param.yaml b/perception/autoware_traffic_light_classifier/config/pedestrian_traffic_light_classifier.param.yaml
index 9974ef3c55b73..4c73babfe5418 100644
--- a/perception/autoware_traffic_light_classifier/config/pedestrian_traffic_light_classifier.param.yaml
+++ b/perception/autoware_traffic_light_classifier/config/pedestrian_traffic_light_classifier.param.yaml
@@ -7,5 +7,5 @@
     classifier_mean: [123.675, 116.28, 103.53]
     classifier_std: [58.395, 57.12, 57.375]
     backlight_threshold: 0.85
-    classifier_type: 1 #classifier_type {hsv_filter: 0, cnn: 1}
-    classify_traffic_light_type: 1 #classify_traffic_light_type {car: 0, pedestrian:1}
+    classifier_type: 1
+    classify_traffic_light_type: 1
diff --git a/perception/autoware_traffic_light_classifier/launch/traffic_light_classifier.launch.xml b/perception/autoware_traffic_light_classifier/launch/car_traffic_light_classifier.launch.xml
similarity index 55%
rename from perception/autoware_traffic_light_classifier/launch/traffic_light_classifier.launch.xml
rename to perception/autoware_traffic_light_classifier/launch/car_traffic_light_classifier.launch.xml
index d0cbbd3dcae9b..41a6924bf22ba 100644
--- a/perception/autoware_traffic_light_classifier/launch/traffic_light_classifier.launch.xml
+++ b/perception/autoware_traffic_light_classifier/launch/car_traffic_light_classifier.launch.xml
@@ -2,7 +2,11 @@
-
+
+
+
+
+
diff --git a/perception/autoware_traffic_light_classifier/launch/pedestrian_traffic_light_classifier.launch.xml b/perception/autoware_traffic_light_classifier/launch/pedestrian_traffic_light_classifier.launch.xml
new file mode 100644
index 0000000000000..18569aeffdd1c
--- /dev/null
+++ b/perception/autoware_traffic_light_classifier/launch/pedestrian_traffic_light_classifier.launch.xml
@@ -0,0 +1,23 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/perception/autoware_traffic_light_classifier/schema/car_traffic_light_classifier.schema.json b/perception/autoware_traffic_light_classifier/schema/car_traffic_light_classifier.schema.json
new file mode 100644
index 0000000000000..62504836f6af3
--- /dev/null
+++ b/perception/autoware_traffic_light_classifier/schema/car_traffic_light_classifier.schema.json
@@ -0,0 +1,89 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "autoware_traffic_light_classifier parameter",
+  "type": "object",
+  "definitions": {
+    "car_traffic_light_classifier": {
+      "type": "object",
+      "properties": {
+        "approximate_sync": {
+          "type": "boolean",
+          "description": "Enable or disable approximate synchronization.",
+          "default": false
+        },
+        "classifier_label_path": {
+          "type": "string",
+          "description": "Path to the label file for the traffic light classifier.",
+          "default": "$(var traffic_light_classifier_model_path)/$(var car_traffic_light_classifier_label_name)"
+        },
+        "classifier_model_path": {
+          "type": "string",
+          "description": "Path to the ONNX model file for the traffic light classifier.",
+          "default": "$(var traffic_light_classifier_model_path)/$(var car_traffic_light_classifier_model_name).onnx"
+        },
+        "classifier_precision": {
+          "type": "string",
+          "description": "Precision used for traffic light classifier inference. Valid values: [fp32, fp16, int8].",
+          "default": "fp16"
+        },
+        "classifier_mean": {
+          "type": "array",
+          "description": "Mean values used for input normalization [R, G, B].",
+          "items": {
+            "type": "number"
+          },
+          "default": [123.675, 116.28, 103.53]
+        },
+        "classifier_std": {
+          "type": "array",
+          "description": "Standard deviation values used for input normalization [R, G, B].",
+          "items": {
+            "type": "number"
+          },
+          "default": [58.395, 57.12, 57.375]
+        },
+        "backlight_threshold": {
+          "type": "number",
+          "description": "If the intensity of light in the RoI is greater than this threshold, the color and shape of the RoI are overwritten with UNKNOWN and the confidence of the overwritten signal is set to `0.0`. The value must be within `[0.0, 1.0]`; the higher the value, the harsher the backlight situations in which the node overwrites. Set it to `1.0` to disable this feature.",
+          "default": 0.85
+        },
+        "classifier_type": {
+          "type": "integer",
+          "description": "Type of classifier used. {0: hsv_filter, 1: cnn}.",
+          "default": 1
+        },
+        "classify_traffic_light_type": {
+          "type": "integer",
+          "description": "Type of traffic light to classify. {0: car, 1: pedestrian}.",
+          "default": 0
+        }
+      },
+      "required": [
+        "approximate_sync",
+        "classifier_label_path",
+        "classifier_model_path",
+        "classifier_precision",
+        "classifier_mean",
+        "classifier_std",
+        "backlight_threshold",
+        "classifier_type",
+        "classify_traffic_light_type"
+      ],
+      "additionalProperties": false
+    }
+  },
+  "properties": {
+    "/**": {
+      "type": "object",
+      "properties": {
+        "ros__parameters": {
+          "$ref": "#/definitions/car_traffic_light_classifier"
+        }
+      },
+      "required": ["ros__parameters"],
+      "additionalProperties": false
+    }
+  },
+  "required": ["/**"],
+  "additionalProperties": false
+}
diff --git a/perception/autoware_traffic_light_classifier/schema/pedestrian_traffic_light_classifier.schema.json b/perception/autoware_traffic_light_classifier/schema/pedestrian_traffic_light_classifier.schema.json
new file mode 100644
index 0000000000000..d033afe77e7f7
--- /dev/null
+++ b/perception/autoware_traffic_light_classifier/schema/pedestrian_traffic_light_classifier.schema.json
@@ -0,0 +1,89 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "autoware_traffic_light_classifier parameter",
+  "type": "object",
+  "definitions": {
+    "pedestrian_traffic_light_classifier": {
+      "type": "object",
+      "properties": {
+        "approximate_sync": {
+          "type": "boolean",
+          "description": "Enable or disable approximate synchronization.",
+          "default": false
+        },
+        "classifier_label_path": {
+          "type": "string",
+          "description": "Path to the label file for the traffic light classifier.",
+          "default": "$(var traffic_light_classifier_model_path)/$(var pedestrian_traffic_light_classifier_label_name)"
+        },
+        "classifier_model_path": {
+          "type": "string",
+          "description": "Path to the ONNX model file for the traffic light classifier.",
+          "default": "$(var traffic_light_classifier_model_path)/$(var pedestrian_traffic_light_classifier_model_name).onnx"
+        },
+        "classifier_precision": {
+          "type": "string",
+          "description": "Precision used for traffic light classifier inference. Valid values: [fp32, fp16, int8].",
+          "default": "fp16"
+        },
+        "classifier_mean": {
+          "type": "array",
+          "description": "Mean values used for input normalization [R, G, B].",
+          "items": {
+            "type": "number"
+          },
+          "default": [123.675, 116.28, 103.53]
+        },
+        "classifier_std": {
+          "type": "array",
+          "description": "Standard deviation values used for input normalization [R, G, B].",
+          "items": {
+            "type": "number"
+          },
+          "default": [58.395, 57.12, 57.375]
+        },
+        "backlight_threshold": {
+          "type": "number",
+          "description": "If the intensity of light in the RoI is greater than this threshold, the color and shape of the RoI are overwritten with UNKNOWN and the confidence of the overwritten signal is set to `0.0`. The value must be within `[0.0, 1.0]`; the higher the value, the harsher the backlight situations in which the node overwrites. Set it to `1.0` to disable this feature.",
+          "default": 0.85
+        },
+        "classifier_type": {
+          "type": "integer",
+          "description": "Type of classifier used. {0: hsv_filter, 1: cnn}.",
+          "default": 1
+        },
+        "classify_traffic_light_type": {
+          "type": "integer",
+          "description": "Type of traffic light to classify. {0: car, 1: pedestrian}.",
+          "default": 1
+        }
+      },
+      "required": [
+        "approximate_sync",
+        "classifier_label_path",
+        "classifier_model_path",
+        "classifier_precision",
+        "classifier_mean",
+        "classifier_std",
+        "backlight_threshold",
+        "classifier_type",
+        "classify_traffic_light_type"
+      ],
+      "additionalProperties": false
+    }
+  },
+  "properties": {
+    "/**": {
+      "type": "object",
+      "properties": {
+        "ros__parameters": {
+          "$ref": "#/definitions/pedestrian_traffic_light_classifier"
+        }
+      },
+      "required": ["ros__parameters"],
+      "additionalProperties": false
+    }
+  },
+  "required": ["/**"],
+  "additionalProperties": false
+}
diff --git a/perception/autoware_traffic_light_classifier/src/classifier/cnn_classifier.cpp b/perception/autoware_traffic_light_classifier/src/classifier/cnn_classifier.cpp
index d47cb1500fffd..8dd2e2e51f2fb 100644
--- a/perception/autoware_traffic_light_classifier/src/classifier/cnn_classifier.cpp
+++ b/perception/autoware_traffic_light_classifier/src/classifier/cnn_classifier.cpp
@@ -34,15 +34,13 @@ CNNClassifier::CNNClassifier(rclcpp::Node * node_ptr) : node_ptr_(node_ptr)
   std::string precision;
   std::string label_file_path;
   std::string model_file_path;
-  precision = node_ptr_->declare_parameter("classifier_precision", "fp16");
-  label_file_path = node_ptr_->declare_parameter("classifier_label_path", "labels.txt");
-  model_file_path = node_ptr_->declare_parameter("classifier_model_path", "model.onnx");
+  precision = node_ptr_->declare_parameter<std::string>("classifier_precision");
+  label_file_path = node_ptr_->declare_parameter<std::string>("classifier_label_path");
+  model_file_path = node_ptr_->declare_parameter<std::string>("classifier_model_path");
   // ros param does not support loading std::vector<float>
   // we have to load std::vector<double> and transfer to std::vector<float>
-  auto mean_d =
-    node_ptr->declare_parameter("classifier_mean", std::vector<double>{123.675, 116.28, 103.53});
-  auto std_d =
-    node_ptr->declare_parameter("classifier_std", std::vector<double>{58.395, 57.12, 57.375});
+  auto mean_d = node_ptr->declare_parameter<std::vector<double>>("classifier_mean");
+  auto std_d = node_ptr->declare_parameter<std::vector<double>>("classifier_std");
   mean_ = std::vector<float>(mean_d.begin(), mean_d.end());
   std_ = std::vector<float>(std_d.begin(), std_d.end());
   if (mean_.size() != 3 || std_.size() != 3) {
@@ -55,7 +53,7 @@ CNNClassifier::CNNClassifier(rclcpp::Node * node_ptr) : node_ptr_(node_ptr)
   classifier_ = std::make_unique(
     model_file_path, precision, mean_, std_);
   batch_size_ = classifier_->getBatchSize();
-  if (node_ptr_->declare_parameter("build_only", false)) {
+  if (node_ptr_->declare_parameter<bool>("build_only")) {
     RCLCPP_INFO(node_ptr_->get_logger(), "TensorRT engine is built and shutdown node.");
     rclcpp::shutdown();
   }
diff --git a/perception/autoware_traffic_light_classifier/src/traffic_light_classifier_node.cpp b/perception/autoware_traffic_light_classifier/src/traffic_light_classifier_node.cpp
index 796a144bf8266..4b03d083a271a 100644
--- a/perception/autoware_traffic_light_classifier/src/traffic_light_classifier_node.cpp
+++ b/perception/autoware_traffic_light_classifier/src/traffic_light_classifier_node.cpp
@@ -25,11 +25,11 @@ namespace autoware::traffic_light
 TrafficLightClassifierNodelet::TrafficLightClassifierNodelet(const rclcpp::NodeOptions & options)
 : Node("traffic_light_classifier_node", options)
 {
-  classify_traffic_light_type_ = this->declare_parameter("classify_traffic_light_type", 0);
+  classify_traffic_light_type_ = this->declare_parameter<int>("classify_traffic_light_type");

   using std::placeholders::_1;
   using std::placeholders::_2;
-  is_approximate_sync_ = this->declare_parameter("approximate_sync", false);
this->declare_parameter("approximate_sync", false); + is_approximate_sync_ = this->declare_parameter("approximate_sync"); backlight_threshold_ = this->declare_parameter("backlight_threshold"); if (is_approximate_sync_) { @@ -49,8 +49,7 @@ TrafficLightClassifierNodelet::TrafficLightClassifierNodelet(const rclcpp::NodeO timer_ = rclcpp::create_timer( this, get_clock(), 100ms, std::bind(&TrafficLightClassifierNodelet::connectCb, this)); - int classifier_type = this->declare_parameter( - "classifier_type", static_cast(TrafficLightClassifierNodelet::ClassifierType::HSVFilter)); + int classifier_type = this->declare_parameter("classifier_type"); if (classifier_type == TrafficLightClassifierNodelet::ClassifierType::HSVFilter) { classifier_ptr_ = std::make_shared(this); } else if (classifier_type == TrafficLightClassifierNodelet::ClassifierType::CNN) {