diff --git a/common/motion_utils/include/motion_utils/trajectory/trajectory.hpp b/common/motion_utils/include/motion_utils/trajectory/trajectory.hpp index 2a119fa02177d..f93e758c47332 100644 --- a/common/motion_utils/include/motion_utils/trajectory/trajectory.hpp +++ b/common/motion_utils/include/motion_utils/trajectory/trajectory.hpp @@ -24,6 +24,7 @@ #include #include #include +#include #include namespace motion_utils @@ -580,6 +581,43 @@ double calcArcLength(const T & points) return calcSignedArcLength(points, 0, points.size() - 1); } +template +inline std::vector calcCurvature(const T & points) +{ + std::vector curvature_vec(points.size()); + + for (size_t i = 1; i < points.size() - 1; ++i) { + const auto p1 = tier4_autoware_utils::getPoint(points.at(i - 1)); + const auto p2 = tier4_autoware_utils::getPoint(points.at(i)); + const auto p3 = tier4_autoware_utils::getPoint(points.at(i + 1)); + curvature_vec.at(i) = (tier4_autoware_utils::calcCurvature(p1, p2, p3)); + } + curvature_vec.at(0) = curvature_vec.at(1); + curvature_vec.at(curvature_vec.size() - 1) = curvature_vec.at(curvature_vec.size() - 2); + + return curvature_vec; +} + +template +inline std::vector> calcCurvatureAndArcLength(const T & points) +{ + // Note that arclength is for the segment, not the sum. + std::vector> curvature_arc_length_vec; + curvature_arc_length_vec.push_back(std::pair(0.0, 0.0)); + for (size_t i = 1; i < points.size() - 1; ++i) { + const auto p1 = tier4_autoware_utils::getPoint(points.at(i - 1)); + const auto p2 = tier4_autoware_utils::getPoint(points.at(i)); + const auto p3 = tier4_autoware_utils::getPoint(points.at(i + 1)); + const double curvature = tier4_autoware_utils::calcCurvature(p1, p2, p3); + const double arc_length = tier4_autoware_utils::calcDistance2d(points.at(i - 1), points.at(i)) + + tier4_autoware_utils::calcDistance2d(points.at(i), points.at(i + 1)); + curvature_arc_length_vec.push_back(std::pair(curvature, arc_length)); + } + curvature_arc_length_vec.push_back(std::pair(0.0, 0.0)); + + return curvature_arc_length_vec; +} + /** * @brief Calculate distance to the forward stop point from the given src index */ diff --git a/common/motion_utils/package.xml b/common/motion_utils/package.xml index d8f6340a473e4..ee6975e0caa83 100644 --- a/common/motion_utils/package.xml +++ b/common/motion_utils/package.xml @@ -8,7 +8,11 @@ Satoshi Ota Takayuki Murooka + Fumiya Watanabe + Kosuke Takeuchi Taiki Tanaka + Takamasa Horibe + Tomoya Kimura Apache License 2.0 Yutaka Shimizu diff --git a/common/tensorrt_common/CMakeLists.txt b/common/tensorrt_common/CMakeLists.txt index 1714c5a44f7eb..395a5ee4f1941 100644 --- a/common/tensorrt_common/CMakeLists.txt +++ b/common/tensorrt_common/CMakeLists.txt @@ -4,6 +4,9 @@ project(tensorrt_common) find_package(autoware_cmake REQUIRED) autoware_package() +# TODO(tensorrt_common): Remove once upgrading to TensorRT 8.5 is complete +add_compile_options(-Wno-deprecated-declarations) + find_package(CUDA) find_package(CUDNN) find_package(TENSORRT) diff --git a/common/tvm_utility/include/tvm_utility/pipeline.hpp b/common/tvm_utility/include/tvm_utility/pipeline.hpp index 600d12e1e3ec8..c6ecd9ca15194 100644 --- a/common/tvm_utility/include/tvm_utility/pipeline.hpp +++ b/common/tvm_utility/include/tvm_utility/pipeline.hpp @@ -182,8 +182,21 @@ class Pipeline PostProcessorType post_processor_{}; }; -// Each node should be specified with a string name and a shape -using NetworkNode = std::pair>; +// NetworkNode +typedef struct +{ + // Node name + std::string node_name; + + // 
Network data type configurations + DLDataTypeCode tvm_dtype_code; + int32_t tvm_dtype_bits; + int32_t tvm_dtype_lanes; + + // Shape info + std::vector node_shape; +} NetworkNode; + typedef struct { // Network info @@ -196,11 +209,6 @@ typedef struct std::string network_graph_path; std::string network_params_path; - // Network data type configurations - DLDataTypeCode tvm_dtype_code; - int32_t tvm_dtype_bits; - int32_t tvm_dtype_lanes; - // Inference hardware configuration DLDeviceType tvm_device_type; int32_t tvm_device_id; @@ -278,8 +286,8 @@ class InferenceEngineTVM : public InferenceEngine for (auto & output_config : config.network_outputs) { output_.push_back(TVMArrayContainer( - output_config.second, config.tvm_dtype_code, config.tvm_dtype_bits, config.tvm_dtype_lanes, - config.tvm_device_type, config.tvm_device_id)); + output_config.node_shape, output_config.tvm_dtype_code, output_config.tvm_dtype_bits, + output_config.tvm_dtype_lanes, config.tvm_device_type, config.tvm_device_id)); } } @@ -290,7 +298,7 @@ class InferenceEngineTVM : public InferenceEngine if (input[index].getArray() == nullptr) { throw std::runtime_error("input variable is null"); } - set_input(config_.network_inputs[index].first.c_str(), input[index].getArray()); + set_input(config_.network_inputs[index].node_name.c_str(), input[index].getArray()); } // Execute the inference diff --git a/common/tvm_utility/test/yolo_v2_tiny/main.cpp b/common/tvm_utility/test/yolo_v2_tiny/main.cpp index 11acf571456b8..aac7900f423c2 100644 --- a/common/tvm_utility/test/yolo_v2_tiny/main.cpp +++ b/common/tvm_utility/test/yolo_v2_tiny/main.cpp @@ -45,18 +45,18 @@ class PreProcessorYoloV2Tiny : public tvm_utility::pipeline::PreProcessor shape_x{1, network_input_width, network_input_height, network_input_depth}; tvm_utility::pipeline::TVMArrayContainer x{ shape_x, - config.tvm_dtype_code, - config.tvm_dtype_bits, - config.tvm_dtype_lanes, + config.network_inputs[0].tvm_dtype_code, + config.network_inputs[0].tvm_dtype_bits, + config.network_inputs[0].tvm_dtype_lanes, config.tvm_device_type, config.tvm_device_id}; @@ -101,7 +101,8 @@ class PreProcessorYoloV2Tiny : public tvm_utility::pipeline::PreProcessor labels{}; std::vector> anchors{}; }; diff --git a/common/tvm_utility/tvm_utility-extras.cmake b/common/tvm_utility/tvm_utility-extras.cmake index 7affa4752b5e4..f346aa8fb4922 100644 --- a/common/tvm_utility/tvm_utility-extras.cmake +++ b/common/tvm_utility/tvm_utility-extras.cmake @@ -14,7 +14,7 @@ # Get user-provided variables set(DOWNLOAD_ARTIFACTS OFF CACHE BOOL "enable artifacts download") -set(MODELZOO_VERSION "2.1.0-20221102" CACHE STRING "targeted ModelZoo version") +set(MODELZOO_VERSION "3.0.0-20221221" CACHE STRING "targeted ModelZoo version") # # Download the selected neural network if it is not already present on disk. @@ -32,6 +32,7 @@ set(MODELZOO_VERSION "2.1.0-20221102" CACHE STRING "targeted ModelZoo version") function(get_neural_network MODEL_NAME MODEL_BACKEND DEPENDENCY) set(DATA_PATH ${CMAKE_CURRENT_SOURCE_DIR}/data) set(EXTERNALPROJECT_NAME ${MODEL_NAME}_${MODEL_BACKEND}) + set(PREPROCESSING "") # Prioritize user-provided models. 
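The new motion_utils::calcCurvature and calcCurvatureAndArcLength helpers introduced above delegate the per-triplet math to tier4_autoware_utils. As a rough, self-contained illustration of what each triplet evaluates, the sketch below computes the signed curvature of the circle through three consecutive 2-D points and the segment arc length attributed to the middle point; Point2d, threePointCurvature, and segmentArcLength are stand-in names for this sketch, not the actual tier4_autoware_utils signatures.

#include <cmath>
#include <stdexcept>

struct Point2d { double x; double y; };

// Signed curvature of the circle through p1, p2, p3 (standard circumscribed-circle
// formula: kappa = 2 * cross / (|p1p2| * |p2p3| * |p3p1|)).
// Positive for a left turn, negative for a right turn.
double threePointCurvature(const Point2d & p1, const Point2d & p2, const Point2d & p3)
{
  const double d12 = std::hypot(p2.x - p1.x, p2.y - p1.y);
  const double d23 = std::hypot(p3.x - p2.x, p3.y - p2.y);
  const double d31 = std::hypot(p1.x - p3.x, p1.y - p3.y);
  const double denom = d12 * d23 * d31;
  if (denom < 1e-10) {
    throw std::runtime_error("points are too close to define a curvature");
  }
  const double cross = (p2.x - p1.x) * (p3.y - p1.y) - (p2.y - p1.y) * (p3.x - p1.x);
  return 2.0 * cross / denom;
}

// Arc length assigned to the middle point, matching the "segment, not the sum" note
// in calcCurvatureAndArcLength: distance from p1 to p2 plus distance from p2 to p3.
double segmentArcLength(const Point2d & p1, const Point2d & p2, const Point2d & p3)
{
  return std::hypot(p2.x - p1.x, p2.y - p1.y) + std::hypot(p3.x - p2.x, p3.y - p2.y);
}

At the trajectory ends, calcCurvature copies the neighboring value (index 0 from index 1, and the last from the second-to-last), while calcCurvatureAndArcLength pads both ends with (0.0, 0.0) pairs, as the diff shows.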
if(IS_DIRECTORY "${DATA_PATH}/user/${MODEL_NAME}") @@ -42,6 +43,13 @@ function(get_neural_network MODEL_NAME MODEL_BACKEND DEPENDENCY) "${DATA_PATH}/models/${MODEL_NAME}/inference_engine_tvm_config.hpp" COPYONLY ) + if(EXISTS "${DATA_PATH}/user/${MODEL_NAME}/preprocessing_inference_engine_tvm_config.hpp") + configure_file( + "${DATA_PATH}/user/${MODEL_NAME}/preprocessing_inference_engine_tvm_config.hpp" + "${DATA_PATH}/models/${MODEL_NAME}/preprocessing_inference_engine_tvm_config.hpp" + COPYONLY + ) + endif() set(DOWNLOAD_DIR "") set(SOURCE_DIR "${DATA_PATH}/user/${MODEL_NAME}") set(INSTALL_DIRECTORY "${DATA_PATH}/user/${MODEL_NAME}") @@ -63,7 +71,9 @@ function(get_neural_network MODEL_NAME MODEL_BACKEND DEPENDENCY) set(SOURCE_DIR "${DATA_PATH}/models/${MODEL_NAME}") set(INSTALL_DIRECTORY "${DATA_PATH}/models/${MODEL_NAME}") endif() - + if(EXISTS "${DATA_PATH}/models/${MODEL_NAME}/preprocessing_inference_engine_tvm_config.hpp") + set(PREPROCESSING "${DATA_PATH}/models/${MODEL_NAME}/preprocessing_inference_engine_tvm_config.hpp") + endif() include(ExternalProject) externalproject_add(${EXTERNALPROJECT_NAME} DOWNLOAD_DIR ${DOWNLOAD_DIR} @@ -72,6 +82,7 @@ function(get_neural_network MODEL_NAME MODEL_BACKEND DEPENDENCY) CONFIGURE_COMMAND "" BUILD_COMMAND "" BUILD_BYPRODUCTS "${DATA_PATH}/models/${MODEL_NAME}/inference_engine_tvm_config.hpp" + BUILD_BYPRODUCTS ${PREPROCESSING} INSTALL_COMMAND "" ) install( diff --git a/launch/tier4_map_launch/CMakeLists.txt b/launch/tier4_map_launch/CMakeLists.txt index 98f12b64f3ca4..b0e770d7c7619 100644 --- a/launch/tier4_map_launch/CMakeLists.txt +++ b/launch/tier4_map_launch/CMakeLists.txt @@ -7,4 +7,5 @@ autoware_package() ament_auto_package( INSTALL_TO_SHARE launch + config ) diff --git a/launch/tier4_map_launch/config/pointcloud_map_loader.param.yaml b/launch/tier4_map_launch/config/pointcloud_map_loader.param.yaml new file mode 100644 index 0000000000000..8f3ccbff00360 --- /dev/null +++ b/launch/tier4_map_launch/config/pointcloud_map_loader.param.yaml @@ -0,0 +1,9 @@ +/**: + ros__parameters: + enable_whole_load: true + enable_downsampled_whole_load: false + enable_partial_load: false + enable_differential_load: false + + # only used when downsample_whole_load enabled + leaf_size: 3.0 # downsample leaf size [m] diff --git a/launch/tier4_map_launch/launch/map.launch.py b/launch/tier4_map_launch/launch/map.launch.py index 231a805603a44..3be3082e58040 100644 --- a/launch/tier4_map_launch/launch/map.launch.py +++ b/launch/tier4_map_launch/launch/map.launch.py @@ -161,7 +161,11 @@ def add_launch_arg(name: str, default_value=None, description=None): ), add_launch_arg( "pointcloud_map_loader_param_path", - "", + [ + FindPackageShare("tier4_map_launch"), + "/config/pointcloud_map_loader.param.yaml", + # ToDo(kminoda): This file should eventually be removed as well as the other components + ], "path to pointcloud_map_loader param file", ), add_launch_arg("use_intra_process", "false", "use ROS2 component container communication"), diff --git a/launch/tier4_map_launch/launch/map.launch.xml b/launch/tier4_map_launch/launch/map.launch.xml index 7ecc61605eeac..39a398e7c1d77 100644 --- a/launch/tier4_map_launch/launch/map.launch.xml +++ b/launch/tier4_map_launch/launch/map.launch.xml @@ -1,6 +1,6 @@ - + diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml index 
74d4cbe98de44..9dca7a7767321 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml @@ -246,6 +246,7 @@ + diff --git a/launch/tier4_planning_launch/config/scenario_planning/lane_driving/behavior_planning/behavior_velocity_planner/intersection.param.yaml b/launch/tier4_planning_launch/config/scenario_planning/lane_driving/behavior_planning/behavior_velocity_planner/intersection.param.yaml deleted file mode 100644 index 220c087d18de0..0000000000000 --- a/launch/tier4_planning_launch/config/scenario_planning/lane_driving/behavior_planning/behavior_velocity_planner/intersection.param.yaml +++ /dev/null @@ -1,26 +0,0 @@ -/**: - ros__parameters: - intersection: - state_transit_margin_time: 0.5 - stop_line_margin: 3.0 - keep_detection_line_margin: 1.0 # distance (toward path end) from generated stop line. keep detection if ego is before this line and ego.vel < keep_detection_vel_thr - keep_detection_vel_thr: 0.833 # == 3.0km/h - stuck_vehicle_detect_dist: 3.0 # this should be the length between cars when they are stopped. The actual stuck vehicle detection length will be this value + vehicle_length. - stuck_vehicle_ignore_dist: 10.0 # obstacle stop max distance(5.0m) + stuck vehicle size / 2 (0.0m-) - stuck_vehicle_vel_thr: 0.833 # 0.833m/s = 3.0km/h - intersection_velocity: 2.778 # 2.778m/s = 10.0km/h - intersection_max_accel: 0.5 # m/ss - detection_area_margin: 0.5 # [m] - detection_area_right_margin: 0.5 # [m] - detection_area_left_margin: 0.5 # [m] - detection_area_length: 200.0 # [m] - detection_area_angle_threshold: 0.785 # [rad] - min_predicted_path_confidence: 0.05 - collision_start_margin_time: 5.0 # [s] - collision_end_margin_time: 2.0 # [s] - use_stuck_stopline: false # stopline generate before the intersection lanelet when is_stuck - assumed_front_car_decel: 1.0 # [m/ss] the expected deceleration of front car when front car as well as ego are turning - enable_front_car_decel_prediction: false # By default this feature is disabled - - merge_from_private_road: - stop_duration_sec: 1.0 diff --git a/localization/gyro_odometer/src/gyro_odometer_core.cpp b/localization/gyro_odometer/src/gyro_odometer_core.cpp index b4a3c7118e051..b413779705af9 100644 --- a/localization/gyro_odometer/src/gyro_odometer_core.cpp +++ b/localization/gyro_odometer/src/gyro_odometer_core.cpp @@ -80,6 +80,7 @@ geometry_msgs::msg::TwistWithCovarianceStamped concatGyroAndOdometer( } else { twist_with_cov.header.stamp = latest_vehicle_twist_stamp; } + twist_with_cov.header.frame_id = gyro_queue.front().header.frame_id; twist_with_cov.twist.twist.linear.x = vx_mean; twist_with_cov.twist.twist.angular = gyro_mean; @@ -211,6 +212,7 @@ void GyroOdometer::callbackImu(const sensor_msgs::msg::Imu::ConstSharedPtr imu_m sensor_msgs::msg::Imu gyro_base_link; gyro_base_link.header = imu_msg_ptr->header; + gyro_base_link.header.frame_id = output_frame_; gyro_base_link.angular_velocity = transformed_angular_velocity.vector; gyro_base_link.angular_velocity_covariance = transformCovariance(imu_msg_ptr->angular_velocity_covariance); diff --git a/perception/lidar_apollo_segmentation_tvm/include/lidar_apollo_segmentation_tvm/lidar_apollo_segmentation_tvm.hpp b/perception/lidar_apollo_segmentation_tvm/include/lidar_apollo_segmentation_tvm/lidar_apollo_segmentation_tvm.hpp index abe1325eb1419..089381ab51c61 100644 --- 
a/perception/lidar_apollo_segmentation_tvm/include/lidar_apollo_segmentation_tvm/lidar_apollo_segmentation_tvm.hpp +++ b/perception/lidar_apollo_segmentation_tvm/include/lidar_apollo_segmentation_tvm/lidar_apollo_segmentation_tvm.hpp @@ -79,7 +79,7 @@ class LIDAR_APOLLO_SEGMENTATION_TVM_LOCAL ApolloLidarSegmentationPreProcessor const int64_t input_channels; const int64_t input_width; const int64_t input_height; - const int64_t datatype_bytes; + const int64_t input_datatype_bytes; const std::shared_ptr feature_generator; TVMArrayContainer output; }; @@ -115,7 +115,7 @@ class LIDAR_APOLLO_SEGMENTATION_TVM_LOCAL ApolloLidarSegmentationPostProcessor const int64_t output_channels; const int64_t output_width; const int64_t output_height; - const int64_t datatype_bytes; + const int64_t output_datatype_bytes; const float32_t objectness_thresh_; const float32_t score_threshold_; const float32_t height_thresh_; diff --git a/perception/lidar_apollo_segmentation_tvm/src/lidar_apollo_segmentation_tvm.cpp b/perception/lidar_apollo_segmentation_tvm/src/lidar_apollo_segmentation_tvm.cpp index 261e57cb894eb..49ecbae311b58 100644 --- a/perception/lidar_apollo_segmentation_tvm/src/lidar_apollo_segmentation_tvm.cpp +++ b/perception/lidar_apollo_segmentation_tvm/src/lidar_apollo_segmentation_tvm.cpp @@ -36,10 +36,10 @@ ApolloLidarSegmentationPreProcessor::ApolloLidarSegmentationPreProcessor( const tvm_utility::pipeline::InferenceEngineTVMConfig & config, int32_t range, bool8_t use_intensity_feature, bool8_t use_constant_feature, float32_t min_height, float32_t max_height) -: input_channels(config.network_inputs[0].second[1]), - input_width(config.network_inputs[0].second[2]), - input_height(config.network_inputs[0].second[3]), - datatype_bytes(config.tvm_dtype_bits / 8), +: input_channels(config.network_inputs[0].node_shape[1]), + input_width(config.network_inputs[0].node_shape[2]), + input_height(config.network_inputs[0].node_shape[3]), + input_datatype_bytes(config.network_inputs[0].tvm_dtype_bits / 8), feature_generator(std::make_shared( input_width, input_height, range, use_intensity_feature, use_constant_feature, min_height, max_height)) @@ -48,9 +48,9 @@ ApolloLidarSegmentationPreProcessor::ApolloLidarSegmentationPreProcessor( std::vector shape_x{1, input_channels, input_width, input_height}; TVMArrayContainer x{ shape_x, - config.tvm_dtype_code, - config.tvm_dtype_bits, - config.tvm_dtype_lanes, + config.network_inputs[0].tvm_dtype_code, + config.network_inputs[0].tvm_dtype_bits, + config.network_inputs[0].tvm_dtype_lanes, config.tvm_device_type, config.tvm_device_id}; output = x; @@ -68,7 +68,7 @@ TVMArrayContainerVector ApolloLidarSegmentationPreProcessor::schedule( TVMArrayCopyFromBytes( output.getArray(), feature_map_ptr->map_data.data(), - input_channels * input_height * input_width * datatype_bytes); + input_channels * input_height * input_width * input_datatype_bytes); return {output}; } @@ -78,10 +78,10 @@ ApolloLidarSegmentationPostProcessor::ApolloLidarSegmentationPostProcessor( const pcl::PointCloud::ConstPtr & pc_ptr, int32_t range, float32_t objectness_thresh, float32_t score_threshold, float32_t height_thresh, int32_t min_pts_num) -: output_channels(config.network_outputs[0].second[1]), - output_width(config.network_outputs[0].second[2]), - output_height(config.network_outputs[0].second[3]), - datatype_bytes(config.tvm_dtype_bits / 8), +: output_channels(config.network_outputs[0].node_shape[1]), + output_width(config.network_outputs[0].node_shape[2]), + 
output_height(config.network_outputs[0].node_shape[3]), + output_datatype_bytes(config.network_outputs[0].tvm_dtype_bits / 8), objectness_thresh_(objectness_thresh), score_threshold_(score_threshold), height_thresh_(height_thresh), @@ -100,7 +100,7 @@ std::shared_ptr ApolloLidarSegmentationPostProcessor std::vector feature(output_channels * output_width * output_height, 0); TVMArrayCopyToBytes( input[0].getArray(), feature.data(), - output_channels * output_width * output_height * datatype_bytes); + output_channels * output_width * output_height * output_datatype_bytes); cluster2d_->cluster( feature.data(), pc_ptr_, valid_idx, objectness_thresh_, true /*use all grids for clustering*/); auto object_array = cluster2d_->getObjects(score_threshold_, height_thresh_, min_pts_num_); diff --git a/perception/lidar_centerpoint/CMakeLists.txt b/perception/lidar_centerpoint/CMakeLists.txt index 2591266652827..f2935ae98a4fa 100644 --- a/perception/lidar_centerpoint/CMakeLists.txt +++ b/perception/lidar_centerpoint/CMakeLists.txt @@ -4,6 +4,9 @@ project(lidar_centerpoint) find_package(autoware_cmake REQUIRED) autoware_package() +# TODO(lidar_centerpoint): Remove once upgrading to TensorRT 8.5 is complete +add_compile_options(-Wno-deprecated-declarations) + option(CUDA_VERBOSE "Verbose output of CUDA modules" OFF) # set flags for CUDA availability @@ -107,12 +110,12 @@ if(TRT_AVAIL AND CUDA_AVAIL AND CUDNN_AVAIL) endfunction() # centerpoint - download(v1 pts_voxel_encoder_centerpoint.onnx 94dcf01ccd53ef14a79787afaada63b4) - download(v1 pts_backbone_neck_head_centerpoint.onnx 7046995e502e11f35c9459668d8afde3) + download(v2 pts_voxel_encoder_centerpoint.onnx e8369d7e0f59951cf87c0274830de1fe) + download(v2 pts_backbone_neck_head_centerpoint.onnx 44e44d26781a69c741bf9bddde57fbb3) # centerpoint_tiny - download(v1 pts_voxel_encoder_centerpoint_tiny.onnx 410f730c537968cb27fbd70c941849a8) - download(v1 pts_backbone_neck_head_centerpoint_tiny.onnx e97c165c7877222c0e27e44409a07517) + download(v2 pts_voxel_encoder_centerpoint_tiny.onnx 5fb774fac44dc83949d05eb3ba077309) + download(v2 pts_backbone_neck_head_centerpoint_tiny.onnx e4658325b70222f7c3637fe00e586b82) find_package(ament_cmake_auto REQUIRED) ament_auto_find_build_dependencies() diff --git a/perception/lidar_centerpoint/README.md b/perception/lidar_centerpoint/README.md index 777b3fc191111..baaea06e59346 100644 --- a/perception/lidar_centerpoint/README.md +++ b/perception/lidar_centerpoint/README.md @@ -52,6 +52,12 @@ We trained the models using . You can download the onnx format of trained models by clicking on the links below. +- Centerpoint : [pts_voxel_encoder_centerpoint.onnx](https://awf.ml.dev.web.auto/perception/models/centerpoint/v2/pts_voxel_encoder_centerpoint.onnx), [pts_backbone_neck_head_centerpoint.onnx](https://awf.ml.dev.web.auto/perception/models/centerpoint/v2/pts_backbone_neck_head_centerpoint.onnx) +- Centerpoint tiny: [pts_voxel_encoder_centerpoint_tiny.onnx](https://awf.ml.dev.web.auto/perception/models/centerpoint/v2/pts_voxel_encoder_centerpoint_tiny.onnx), [pts_backbone_neck_head_centerpoint_tiny.onnx](https://awf.ml.dev.web.auto/perception/models/centerpoint/v2/pts_backbone_neck_head_centerpoint_tiny.onnx) + +`Centerpoint` was trained in `nuScenes` (~110k lidar frames) [8] and TIER IV's internal database (~11k lidar frames) for 60 epochs. +`Centerpoint tiny` was trained in `Argoverse 2` (~28k lidar frames) [9] and TIER IV's internal database (~11k lidar frames) for 20 epochs. 
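With the NetworkNode refactor above, the data type description now lives on each input/output node rather than on the whole engine config, so the pre-/post-processors derive their copy sizes from the node itself (this is what the datatype_bytes → input_datatype_bytes / output_datatype_bytes renames reflect). The snippet below is a minimal sketch of that byte-count calculation; NodeConfig and nodeByteCount are hypothetical local names that mirror the fields in the diff, not the actual tvm_utility types.

#include <cstdint>
#include <cstddef>
#include <numeric>
#include <vector>

// Hypothetical stand-in for a per-node configuration after the NetworkNode change:
// the shape plus the node's own dtype description (bits and lanes).
struct NodeConfig
{
  std::vector<int64_t> node_shape;   // e.g. {1, channels, width, height}
  int32_t tvm_dtype_bits{32};        // bits per element, e.g. 32 for float32
  int32_t tvm_dtype_lanes{1};        // vector lanes, usually 1
};

// Number of bytes to copy with TVMArrayCopyFromBytes/ToBytes for one node:
// the product of the shape dimensions times the element size derived from the
// node's own dtype, i.e. channels * width * height * (tvm_dtype_bits / 8) above.
inline std::size_t nodeByteCount(const NodeConfig & node)
{
  const int64_t elements = std::accumulate(
    node.node_shape.begin(), node.node_shape.end(), int64_t{1},
    [](int64_t acc, int64_t dim) { return acc * dim; });
  const std::size_t bytes_per_element =
    static_cast<std::size_t>(node.tvm_dtype_bits) / 8U *
    static_cast<std::size_t>(node.tvm_dtype_lanes);
  return static_cast<std::size_t>(elements) * bytes_per_element;
}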
+ ## Standalone inference and visualization In addition to its use as a standard ROS node, `lidar_centerpoint` can also be used to perform inferences in an isolated manner. @@ -119,6 +125,10 @@ Example: [7] +[8] + +[9] + ## (Optional) Future extensions / Unimplemented parts Mamoru Sobue + Takayuki Murooka Satoshi Ota diff --git a/planning/behavior_velocity_planner/src/node.cpp b/planning/behavior_velocity_planner/src/node.cpp index 5fea6f0a13e82..ab5d31e01c224 100644 --- a/planning/behavior_velocity_planner/src/node.cpp +++ b/planning/behavior_velocity_planner/src/node.cpp @@ -238,12 +238,7 @@ bool BehaviorVelocityPlannerNode::isDataReady( RCLCPP_INFO_THROTTLE(get_logger(), clock, 3000, "Waiting for pointcloud"); return false; } - if (!d.route_handler_) { - RCLCPP_INFO_THROTTLE( - get_logger(), clock, 3000, "Waiting for the initialization of route_handler"); - return false; - } - if (!d.route_handler_->isMapMsgReady()) { + if (!map_ptr_) { RCLCPP_INFO_THROTTLE(get_logger(), clock, 3000, "Waiting for the initialization of map"); return false; } @@ -344,8 +339,8 @@ void BehaviorVelocityPlannerNode::onLaneletMap( { std::lock_guard lock(mutex_); - // Load map - planner_data_.route_handler_ = std::make_shared(*msg); + map_ptr_ = msg; + has_received_map_ = true; } void BehaviorVelocityPlannerNode::onTrafficSignals( @@ -419,6 +414,18 @@ void BehaviorVelocityPlannerNode::onTrigger( return; } + // Load map and check route handler + if (has_received_map_) { + planner_data_.route_handler_ = std::make_shared(*map_ptr_); + has_received_map_ = false; + } + if (!planner_data_.route_handler_) { + RCLCPP_INFO_THROTTLE( + get_logger(), *get_clock(), 3000, "Waiting for the initialization of route_handler"); + mutex_.unlock(); + return; + } + // NOTE: planner_data must not be referenced for multithreading const auto planner_data = planner_data_; mutex_.unlock(); diff --git a/planning/behavior_velocity_planner/src/scene_module/intersection/scene_intersection.cpp b/planning/behavior_velocity_planner/src/scene_module/intersection/scene_intersection.cpp index 01a38f42f4d49..7485683a79176 100644 --- a/planning/behavior_velocity_planner/src/scene_module/intersection/scene_intersection.cpp +++ b/planning/behavior_velocity_planner/src/scene_module/intersection/scene_intersection.cpp @@ -150,6 +150,33 @@ bool IntersectionModule::modifyPathVelocity(PathWithLaneId * path, StopReason * } const size_t closest_idx = closest_idx_opt.get(); + if (stop_lines_idx_opt.has_value()) { + const auto stop_line_idx = stop_lines_idx_opt.value().stop_line; + const auto pass_judge_line_idx = stop_lines_idx_opt.value().pass_judge_line; + const auto keep_detection_line_idx = stop_lines_idx_opt.value().keep_detection_line; + + const bool is_over_pass_judge_line = + util::isOverTargetIndex(*path, closest_idx, current_pose.pose, pass_judge_line_idx); + const bool is_before_keep_detection_line = + stop_lines_idx_opt.has_value() + ? util::isBeforeTargetIndex(*path, closest_idx, current_pose.pose, keep_detection_line_idx) + : false; + const bool keep_detection = is_before_keep_detection_line && + std::fabs(current_vel) < planner_param_.keep_detection_vel_thr; + + if (is_over_pass_judge_line && keep_detection) { + // in case ego could not stop exactly before the stop line, but with some overshoot, + // keep detection within some margin under low velocity threshold + } else if (is_over_pass_judge_line && is_go_out_ && !external_stop) { + RCLCPP_INFO(logger_, "over the keep_detection line and not low speed. 
no plan needed."); + RCLCPP_DEBUG(logger_, "===== plan end ====="); + setSafe(true); + setDistance(motion_utils::calcSignedArcLength( + path->points, planner_data_->current_pose.pose.position, + path->points.at(stop_line_idx).point.pose.position)); + return true; + } + } /* collision checking */ bool is_entry_prohibited = false; @@ -171,8 +198,10 @@ bool IntersectionModule::modifyPathVelocity(PathWithLaneId * path, StopReason * closest_idx, stuck_vehicle_detect_area); /* calculate final stop lines */ - int stop_line_idx_final = -1; - int pass_judge_line_idx_final = -1; + int stop_line_idx_final = + stop_lines_idx_opt.has_value() ? stop_lines_idx_opt.value().stop_line : -1; + int pass_judge_line_idx_final = + stop_lines_idx_opt.has_value() ? stop_lines_idx_opt.value().pass_judge_line : -1; if (external_go) { is_entry_prohibited = false; } else if (external_stop) { @@ -204,29 +233,6 @@ bool IntersectionModule::modifyPathVelocity(PathWithLaneId * path, StopReason * return false; } - const bool is_over_pass_judge_line = - util::isOverTargetIndex(*path, closest_idx, current_pose.pose, pass_judge_line_idx_final); - const bool is_before_keep_detection_line = - stop_lines_idx_opt.has_value() - ? util::isBeforeTargetIndex( - *path, closest_idx, current_pose.pose, stop_lines_idx_opt.value().keep_detection_line) - : false; - const bool keep_detection = - is_before_keep_detection_line && std::fabs(current_vel) < planner_param_.keep_detection_vel_thr; - - if (is_over_pass_judge_line && keep_detection) { - // in case ego could not stop exactly before the stop line, but with some overshoot, - // keep detection within some margin under low velocity threshold - } else if (is_over_pass_judge_line && is_go_out_ && !external_stop) { - RCLCPP_DEBUG(logger_, "over the keep_detection line and not low speed. no plan needed."); - RCLCPP_DEBUG(logger_, "===== plan end ====="); - setSafe(true); - setDistance(motion_utils::calcSignedArcLength( - path->points, planner_data_->current_pose.pose.position, - path->points.at(stop_line_idx_final).point.pose.position)); - return true; - } - state_machine_.setStateWithMarginTime( is_entry_prohibited ? 
StateMachine::State::STOP : StateMachine::State::GO, logger_.get_child("state_machine"), *clock_); diff --git a/planning/mission_planner/src/lanelet2_plugins/default_planner.cpp b/planning/mission_planner/src/lanelet2_plugins/default_planner.cpp index b741bd4479763..55ed5a4cf57b0 100644 --- a/planning/mission_planner/src/lanelet2_plugins/default_planner.cpp +++ b/planning/mission_planner/src/lanelet2_plugins/default_planner.cpp @@ -272,6 +272,25 @@ bool DefaultPlanner::is_goal_valid( const geometry_msgs::msg::Pose & goal, lanelet::ConstLanelets path_lanelets) { const auto logger = node_->get_logger(); + + const auto goal_lanelet_pt = lanelet::utils::conversion::toLaneletPoint(goal.position); + + // check if goal is in shoulder lanelet + lanelet::Lanelet closest_shoulder_lanelet; + if (lanelet::utils::query::getClosestLanelet( + shoulder_lanelets_, goal, &closest_shoulder_lanelet)) { + if (is_in_lane(closest_shoulder_lanelet, goal_lanelet_pt)) { + const auto lane_yaw = + lanelet::utils::getLaneletAngle(closest_shoulder_lanelet, goal.position); + const auto goal_yaw = tf2::getYaw(goal.orientation); + const auto angle_diff = tier4_autoware_utils::normalizeRadian(lane_yaw - goal_yaw); + const double th_angle = tier4_autoware_utils::deg2rad(param_.goal_angle_threshold_deg); + if (std::abs(angle_diff) < th_angle) { + return true; + } + } + } + lanelet::Lanelet closest_lanelet; if (!lanelet::utils::query::getClosestLanelet(road_lanelets_, goal, &closest_lanelet)) { return false; @@ -298,8 +317,6 @@ bool DefaultPlanner::is_goal_valid( return false; } - const auto goal_lanelet_pt = lanelet::utils::conversion::toLaneletPoint(goal.position); - if (is_in_lane(closest_lanelet, goal_lanelet_pt)) { const auto lane_yaw = lanelet::utils::getLaneletAngle(closest_lanelet, goal.position); const auto goal_yaw = tf2::getYaw(goal.orientation); @@ -323,23 +340,6 @@ bool DefaultPlanner::is_goal_valid( return true; } - // check if goal is in shoulder lanelet - lanelet::Lanelet closest_shoulder_lanelet; - if (!lanelet::utils::query::getClosestLanelet( - shoulder_lanelets_, goal, &closest_shoulder_lanelet)) { - return false; - } - // check if goal pose is in shoulder lane - if (is_in_lane(closest_shoulder_lanelet, goal_lanelet_pt)) { - const auto lane_yaw = lanelet::utils::getLaneletAngle(closest_shoulder_lanelet, goal.position); - const auto goal_yaw = tf2::getYaw(goal.orientation); - const auto angle_diff = tier4_autoware_utils::normalizeRadian(lane_yaw - goal_yaw); - - const double th_angle = tier4_autoware_utils::deg2rad(param_.goal_angle_threshold_deg); - if (std::abs(angle_diff) < th_angle) { - return true; - } - } return false; } diff --git a/planning/planning_debug_tools/include/planning_debug_tools/trajectory_analyzer.hpp b/planning/planning_debug_tools/include/planning_debug_tools/trajectory_analyzer.hpp index e3e4c6d75d6ed..a70ab56d98a94 100644 --- a/planning/planning_debug_tools/include/planning_debug_tools/trajectory_analyzer.hpp +++ b/planning/planning_debug_tools/include/planning_debug_tools/trajectory_analyzer.hpp @@ -87,7 +87,7 @@ class TrajectoryAnalyzer TrajectoryDebugInfo data; data.stamp = node_->now(); data.size = points.size(); - data.curvature = calcCurvature(points); + data.curvature = motion_utils::calcCurvature(points); const auto arclength_offset = motion_utils::calcSignedArcLength(points, 0, ego_p); data.arclength = calcPathArcLengthArray(points, -arclength_offset); data.velocity = getVelocityArray(points); diff --git a/planning/planning_debug_tools/include/planning_debug_tools/util.hpp 
b/planning/planning_debug_tools/include/planning_debug_tools/util.hpp index e3b4ab7639cb6..96488fbde4b96 100644 --- a/planning/planning_debug_tools/include/planning_debug_tools/util.hpp +++ b/planning/planning_debug_tools/include/planning_debug_tools/util.hpp @@ -114,21 +114,6 @@ std::vector calcPathArcLengthArray(const T & points, const double offset return out; } -template -inline std::vector calcCurvature(const T & points) -{ - std::vector curvature_arr; - curvature_arr.push_back(0.0); - for (size_t i = 1; i < points.size() - 1; ++i) { - const auto p1 = getPoint(points.at(i - 1)); - const auto p2 = getPoint(points.at(i)); - const auto p3 = getPoint(points.at(i + 1)); - curvature_arr.push_back(tier4_autoware_utils::calcCurvature(p1, p2, p3)); - } - curvature_arr.push_back(0.0); - return curvature_arr; -} - } // namespace planning_debug_tools #endif // PLANNING_DEBUG_TOOLS__UTIL_HPP_ diff --git a/system/system_monitor/launch/system_monitor.launch.xml b/system/system_monitor/launch/system_monitor.launch.xml index 0d8a14981bd8a..c6d29dd6b772f 100644 --- a/system/system_monitor/launch/system_monitor.launch.xml +++ b/system/system_monitor/launch/system_monitor.launch.xml @@ -1,12 +1,12 @@ - - - - - - - - + + + + + + + + diff --git a/vehicle/accel_brake_map_calibrator/accel_brake_map_calibrator/launch/accel_brake_map_calibrator.launch.xml b/vehicle/accel_brake_map_calibrator/accel_brake_map_calibrator/launch/accel_brake_map_calibrator.launch.xml index c3379ee67f7ca..69a65a8d540a2 100644 --- a/vehicle/accel_brake_map_calibrator/accel_brake_map_calibrator/launch/accel_brake_map_calibrator.launch.xml +++ b/vehicle/accel_brake_map_calibrator/accel_brake_map_calibrator/launch/accel_brake_map_calibrator.launch.xml @@ -8,6 +8,7 @@ + @@ -25,7 +26,9 @@ - + + + diff --git a/vehicle/accel_brake_map_calibrator/accel_brake_map_calibrator/scripts/new_accel_brake_map_server.py b/vehicle/accel_brake_map_calibrator/accel_brake_map_calibrator/scripts/new_accel_brake_map_server.py index ca65b332093a7..9b5cfc9186495 100755 --- a/vehicle/accel_brake_map_calibrator/accel_brake_map_calibrator/scripts/new_accel_brake_map_server.py +++ b/vehicle/accel_brake_map_calibrator/accel_brake_map_calibrator/scripts/new_accel_brake_map_server.py @@ -35,15 +35,43 @@ class DrawGraph(Node): def __init__(self): super().__init__("plot_server") + self.srv = self.create_service( CalibData, "/accel_brake_map_calibrator/get_data_service", self.get_data_callback ) - package_path = get_package_share_directory("accel_brake_map_calibrator") default_map_path = get_package_share_directory("raw_vehicle_cmd_converter") + self.declare_parameter( + "/accel_brake_map_calibrator/csv_default_map_dir", default_map_path + "/data/default/" + ) + self.default_map_dir = ( + self.get_parameter("/accel_brake_map_calibrator/csv_default_map_dir") + .get_parameter_value() + .string_value + ) + + package_path = get_package_share_directory("accel_brake_map_calibrator") + self.declare_parameter( + "/accel_brake_map_calibrator/csv_calibrated_map_dir", package_path + "/config/" + ) + self.calibrated_map_dir = ( + self.get_parameter("/accel_brake_map_calibrator/csv_calibrated_map_dir") + .get_parameter_value() + .string_value + ) + + self.declare_parameter("calibration_method", "each_cell") + self.calibration_method = ( + self.get_parameter("calibration_method").get_parameter_value().string_value + ) + if self.calibration_method is None: + self.calibration_method = "each_cell" + elif not ( + (self.calibration_method == "each_cell") | (self.calibration_method == 
"four_cell") + ): + print("invalid method.") + self.calibration_method = "each_cell" - self.default_map_dir = default_map_path + "/data/default/" - self.calibrated_map_dir = package_path + "/config/" self.log_file = package_path + "/config/log.csv" config_file = package_path + "/config/accel_brake_map_calibrator.param.yaml" @@ -71,6 +99,7 @@ def __init__(self): # debug self.get_logger().info("default map dir: {}".format(self.default_map_dir)) self.get_logger().info("calibrated map dir: {}".format(self.calibrated_map_dir)) + self.get_logger().info("calibrated method: {}".format(self.calibration_method)) self.get_logger().info("log file :{}".format(self.log_file)) self.get_logger().info("min_vel_thr : {}".format(self.min_vel_thr)) self.get_logger().info("vel_diff_thr : {}".format(self.vel_diff_thr)) @@ -82,7 +111,7 @@ def __init__(self): def get_data_callback(self, request, response): # read csv - # If log file doesn't exsist, return empty data + # If log file doesn't exist, return empty data if not Path(self.log_file).exists(): response.graph_image = [] self.get_logger().info("svg data is empty") @@ -123,6 +152,7 @@ def get_data_callback(self, request, response): self.vel_diff_thr, CF.PEDAL_LIST, self.pedal_diff_thr, + self.calibration_method, ) count_map, average_map, stddev_map = CalcUtils.create_stat_map(data)