From 0acb2e9a5d4ee578bbab4fe35a3e6bb206f98915 Mon Sep 17 00:00:00 2001 From: Takayuki Murooka Date: Wed, 9 Aug 2023 02:56:23 +0900 Subject: [PATCH 1/4] refactor perception_reproducer Signed-off-by: Takayuki Murooka --- .../perception_replayer_common.py | 122 ++++++++++++ .../perception_reproducer.py | 179 ++---------------- .../scripts/perception_replayer/utils.py | 85 +++++++++ 3 files changed, 218 insertions(+), 168 deletions(-) create mode 100755 planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py rename planning/planning_debug_tools/scripts/{ => perception_replayer}/perception_reproducer.py (51%) mode change 100755 => 100644 create mode 100644 planning/planning_debug_tools/scripts/perception_replayer/utils.py diff --git a/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py b/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py new file mode 100755 index 0000000000000..07c153cb3f614 --- /dev/null +++ b/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python3 + +# Copyright 2023 TIER IV, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +from subprocess import CalledProcessError +from subprocess import check_output +import time + +from autoware_auto_perception_msgs.msg import DetectedObjects +from autoware_auto_perception_msgs.msg import PredictedObjects +from autoware_auto_perception_msgs.msg import TrackedObjects +from autoware_auto_perception_msgs.msg import TrafficSignalArray +import psutil +from rclpy.node import Node +from rclpy.serialization import deserialize_message +from rosbag2_py import StorageFilter +from rosidl_runtime_py.utilities import get_message +from sensor_msgs.msg import PointCloud2 +from utils import open_reader + + +class PerceptionReplayerCommon(Node): + def __init__(self, args, name): + super().__init__(name) + self.args = args + + # publisher + if self.args.detected_object: + self.objects_pub = self.create_publisher( + DetectedObjects, "/perception/object_recognition/detection/objects", 1 + ) + elif self.args.tracked_object: + self.objects_pub = self.create_publisher( + TrackedObjects, "/perception/object_recognition/tracking/objects", 1 + ) + else: + self.objects_pub = self.create_publisher( + PredictedObjects, "/perception/object_recognition/objects", 1 + ) + + self.pointcloud_pub = self.create_publisher( + PointCloud2, "/perception/obstacle_segmentation/pointcloud", 1 + ) + self.traffic_signals_pub = self.create_publisher( + TrafficSignalArray, "/perception/traffic_light_recognition/traffic_signals", 1 + ) + + self.rosbag_objects_data = [] + self.rosbag_ego_odom_data = [] + self.rosbag_traffic_signals_data = [] + + # load rosbag + print("Stared loading rosbag") + if os.path.isdir(args.bag): + for bag_file in sorted(os.listdir(args.bag)): + self.load_rosbag(args.bag + "/" + bag_file) + else: + self.load_rosbag(args.bag) + print("Ended loading rosbag") + + # wait for ready to publish/subscribe + time.sleep(1.0) + + def load_rosbag(self, rosbag2_path: str): + reader = open_reader(str(rosbag2_path)) + + topic_types = reader.get_all_topics_and_types() + # 
Create a map for quicker lookup + type_map = {topic_types[i].name: topic_types[i].type for i in range(len(topic_types))} + + objects_topic = ( + "/perception/object_recognition/detection/objects" + if self.args.detected_object + else "/perception/object_recognition/tracking/objects" + if self.args.tracked_object + else "/perception/object_recognition/objects" + ) + ego_odom_topic = "/localization/kinematic_state" + traffic_signals_topic = "/perception/traffic_light_recognition/traffic_signals" + topic_filter = StorageFilter(topics=[objects_topic, ego_odom_topic, traffic_signals_topic]) + reader.set_filter(topic_filter) + + while reader.has_next(): + (topic, data, stamp) = reader.read_next() + msg_type = get_message(type_map[topic]) + msg = deserialize_message(data, msg_type) + if topic == objects_topic: + self.rosbag_objects_data.append((stamp, msg)) + if topic == ego_odom_topic: + self.rosbag_ego_odom_data.append((stamp, msg)) + if topic == traffic_signals_topic: + self.rosbag_traffic_signals_data.append((stamp, msg)) + + def kill_online_perception_node(self): + # kill node if required + kill_process_name = None + if self.args.detected_object: + kill_process_name = "dummy_perception_publisher_node" + elif self.args.tracked_object: + kill_process_name = "multi_object_tracker" + else: + kill_process_name = "map_based_prediction" + if kill_process_name: + try: + pid = check_output(["pidof", kill_process_name]) + process = psutil.Process(int(pid[:-1])) + process.terminate() + except CalledProcessError: + pass diff --git a/planning/planning_debug_tools/scripts/perception_reproducer.py b/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py old mode 100755 new mode 100644 similarity index 51% rename from planning/planning_debug_tools/scripts/perception_reproducer.py rename to planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py index 5fc367665e256..216f095472aa5 --- 
a/planning/planning_debug_tools/scripts/perception_reproducer.py +++ b/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py @@ -17,144 +17,31 @@ import argparse import copy import math -import os -from subprocess import CalledProcessError -from subprocess import check_output -import time -from autoware_auto_perception_msgs.msg import DetectedObjects -from autoware_auto_perception_msgs.msg import PredictedObjects -from autoware_auto_perception_msgs.msg import TrackedObjects -from autoware_auto_perception_msgs.msg import TrafficSignalArray -from geometry_msgs.msg import Quaternion from nav_msgs.msg import Odometry import numpy as np -import psutil +from perception_replayer_common import PerceptionReplayerCommon import rclpy -from rclpy.node import Node -from rclpy.serialization import deserialize_message -import rosbag2_py -from rosbag2_py import StorageFilter -from rosidl_runtime_py.utilities import get_message -from sensor_msgs.msg import PointCloud2 -from sensor_msgs.msg import PointField -from tf_transformations import euler_from_quaternion -from tf_transformations import quaternion_from_euler +from utils import calc_squared_distance +from utils import create_empty_pointcloud +from utils import get_quaternion_from_yaw +from utils import get_yaw_from_quaternion -def get_rosbag_options(path, serialization_format="cdr"): - storage_options = rosbag2_py.StorageOptions(uri=path, storage_id="sqlite3") - - converter_options = rosbag2_py.ConverterOptions( - input_serialization_format=serialization_format, - output_serialization_format=serialization_format, - ) - - return storage_options, converter_options - - -def open_reader(path: str): - storage_options, converter_options = get_rosbag_options(path) - reader = rosbag2_py.SequentialReader() - reader.open(storage_options, converter_options) - return reader - - -def calc_squared_distance(p1, p2): - return math.sqrt((p1.x - p2.x) ** 2 + (p1.y - p2.y) ** 2) - - -def 
create_empty_pointcloud(timestamp): - pointcloud_msg = PointCloud2() - pointcloud_msg.header.stamp = timestamp - pointcloud_msg.header.frame_id = "map" - pointcloud_msg.height = 1 - pointcloud_msg.is_dense = True - pointcloud_msg.point_step = 16 - field_name_vec = ["x", "y", "z"] - offset_vec = [0, 4, 8] - for field_name, offset in zip(field_name_vec, offset_vec): - field = PointField() - field.name = field_name - field.offset = offset - field.datatype = 7 - field.count = 1 - pointcloud_msg.fields.append(field) - return pointcloud_msg - - -def get_yaw_from_quaternion(orientation): - orientation_list = [ - orientation.x, - orientation.y, - orientation.z, - orientation.w, - ] - return euler_from_quaternion(orientation_list)[2] - - -def get_quaternion_from_yaw(yaw): - q = quaternion_from_euler(0, 0, yaw) - orientation = Quaternion() - orientation.x = q[0] - orientation.y = q[1] - orientation.z = q[2] - orientation.w = q[3] - return orientation - - -class PerceptionReproducer(Node): +class PerceptionReproducer(PerceptionReplayerCommon): def __init__(self, args): - super().__init__("perception_reproducer") - self.args = args + super().__init__(args, "perception_reproducer") - # subscriber + # additional subscriber self.sub_odom = self.create_subscription( Odometry, "/localization/kinematic_state", self.on_odom, 1 ) - # publisher - if self.args.detected_object: - self.objects_pub = self.create_publisher( - DetectedObjects, "/perception/object_recognition/detection/objects", 1 - ) - elif self.args.tracked_object: - self.objects_pub = self.create_publisher( - TrackedObjects, "/perception/object_recognition/tracking/objects", 1 - ) - else: - self.objects_pub = self.create_publisher( - PredictedObjects, "/perception/object_recognition/objects", 1 - ) - - self.pointcloud_pub = self.create_publisher( - PointCloud2, "/perception/obstacle_segmentation/pointcloud", 1 - ) - self.traffic_signals_pub = self.create_publisher( - TrafficSignalArray, 
"/perception/traffic_light_recognition/traffic_signals", 1 - ) - self.ego_pose_idx = None self.ego_pose = None - self.prev_traffic_signals_msg = None - self.rosbag_objects_data = [] - self.rosbag_ego_odom_data = [] - self.rosbag_traffic_signals_data = [] - - # load rosbag - print("Stared loading rosbag") - if os.path.isdir(args.bag): - for bag_file in sorted(os.listdir(args.bag)): - self.load_rosbag(args.bag + "/" + bag_file) - else: - self.load_rosbag(args.bag) - print("Ended loading rosbag") - - # wait for ready to publish/subscribe - time.sleep(1.0) - + # start timer callback self.timer = self.create_timer(0.01, self.on_timer) print("Start timer callback") @@ -162,24 +49,10 @@ def on_odom(self, odom): self.ego_pose = odom.pose.pose def on_timer(self): - # kill node if required - kill_process_name = None - if self.args.detected_object: - kill_process_name = "dummy_perception_publisher_node" - elif self.args.tracked_object: - kill_process_name = "multi_object_tracker" - else: - kill_process_name = "map_based_prediction" - if kill_process_name: - try: - pid = check_output(["pidof", kill_process_name]) - process = psutil.Process(int(pid[:-1])) - process.terminate() - except CalledProcessError: - pass - timestamp = self.get_clock().now().to_msg() + self.kill_online_perception_node() + if self.args.detected_object: pointcloud_msg = create_empty_pointcloud(timestamp) self.pointcloud_pub.publish(pointcloud_msg) @@ -281,36 +154,6 @@ def find_topics_by_timestamp(self, timestamp): return objects_data, traffic_signals_data - def load_rosbag(self, rosbag2_path: str): - reader = open_reader(str(rosbag2_path)) - - topic_types = reader.get_all_topics_and_types() - # Create a map for quicker lookup - type_map = {topic_types[i].name: topic_types[i].type for i in range(len(topic_types))} - - objects_topic = ( - "/perception/object_recognition/detection/objects" - if self.args.detected_object - else "/perception/object_recognition/tracking/objects" - if self.args.tracked_object - 
else "/perception/object_recognition/objects" - ) - ego_odom_topic = "/localization/kinematic_state" - traffic_signals_topic = "/perception/traffic_light_recognition/traffic_signals" - topic_filter = StorageFilter(topics=[objects_topic, ego_odom_topic, traffic_signals_topic]) - reader.set_filter(topic_filter) - - while reader.has_next(): - (topic, data, stamp) = reader.read_next() - msg_type = get_message(type_map[topic]) - msg = deserialize_message(data, msg_type) - if topic == objects_topic: - self.rosbag_objects_data.append((stamp, msg)) - if topic == ego_odom_topic: - self.rosbag_ego_odom_data.append((stamp, msg)) - if topic == traffic_signals_topic: - self.rosbag_traffic_signals_data.append((stamp, msg)) - if __name__ == "__main__": parser = argparse.ArgumentParser() diff --git a/planning/planning_debug_tools/scripts/perception_replayer/utils.py b/planning/planning_debug_tools/scripts/perception_replayer/utils.py new file mode 100644 index 0000000000000..3e954fb0cd293 --- /dev/null +++ b/planning/planning_debug_tools/scripts/perception_replayer/utils.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python3 + +# Copyright 2023 TIER IV, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import math + +from geometry_msgs.msg import Quaternion +import rosbag2_py +from sensor_msgs.msg import PointCloud2 +from sensor_msgs.msg import PointField +from tf_transformations import euler_from_quaternion +from tf_transformations import quaternion_from_euler + + +def get_rosbag_options(path, serialization_format="cdr"): + storage_options = rosbag2_py.StorageOptions(uri=path, storage_id="sqlite3") + + converter_options = rosbag2_py.ConverterOptions( + input_serialization_format=serialization_format, + output_serialization_format=serialization_format, + ) + + return storage_options, converter_options + + +def open_reader(path: str): + storage_options, converter_options = get_rosbag_options(path) + reader = rosbag2_py.SequentialReader() + reader.open(storage_options, converter_options) + return reader + + +def calc_squared_distance(p1, p2): + return math.sqrt((p1.x - p2.x) ** 2 + (p1.y - p2.y) ** 2) + + +def create_empty_pointcloud(timestamp): + pointcloud_msg = PointCloud2() + pointcloud_msg.header.stamp = timestamp + pointcloud_msg.header.frame_id = "map" + pointcloud_msg.height = 1 + pointcloud_msg.is_dense = True + pointcloud_msg.point_step = 16 + field_name_vec = ["x", "y", "z"] + offset_vec = [0, 4, 8] + for field_name, offset in zip(field_name_vec, offset_vec): + field = PointField() + field.name = field_name + field.offset = offset + field.datatype = 7 + field.count = 1 + pointcloud_msg.fields.append(field) + return pointcloud_msg + + +def get_yaw_from_quaternion(orientation): + orientation_list = [ + orientation.x, + orientation.y, + orientation.z, + orientation.w, + ] + return euler_from_quaternion(orientation_list)[2] + + +def get_quaternion_from_yaw(yaw): + q = quaternion_from_euler(0, 0, yaw) + orientation = Quaternion() + orientation.x = q[0] + orientation.y = q[1] + orientation.z = q[2] + orientation.w = q[3] + return orientation From bde4e373aaea2ebbb91efcc16c554bad669db317 Mon Sep 17 00:00:00 2001 From: Takayuki Murooka Date: Wed, 9 Aug 2023 
03:55:43 +0900 Subject: [PATCH 2/4] implement perception_reproducer Signed-off-by: Takayuki Murooka --- .../perception_replayer.py | 125 ++++++++++++++++++ .../perception_replayer_common.py | 42 +++++- .../perception_reproducer.py | 119 ++++------------- .../time_manager_widget.py | 59 +++++++++ .../scripts/perception_replayer/utils.py | 40 ++++++ 5 files changed, 291 insertions(+), 94 deletions(-) create mode 100644 planning/planning_debug_tools/scripts/perception_replayer/perception_replayer.py create mode 100644 planning/planning_debug_tools/scripts/perception_replayer/time_manager_widget.py diff --git a/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer.py b/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer.py new file mode 100644 index 0000000000000..e54458fee8f69 --- /dev/null +++ b/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python3 + +# Copyright 2023 TIER IV, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse +import copy +import functools +import sys + +from PyQt5.QtWidgets import QApplication +from perception_replayer_common import PerceptionReplayerCommon +import rclpy +from time_manager_widget import TimeManagerWidget +from utils import create_empty_pointcloud +from utils import translate_objects_coordinate + + +class PerceptionReplayer(PerceptionReplayerCommon): + def __init__(self, args): + super().__init__(args, "perception_replayer") + + self.bag_timestamp = self.rosbag_objects_data[0][0] + self.is_pause = False + self.rate = 1.0 + + # initialize widget + self.widget = TimeManagerWidget() + self.widget.show() + self.widget.button.clicked.connect(self.onPushed) + for button in self.widget.rate_button: + button.clicked.connect(functools.partial(self.onSetRate, button)) + + # start timer callback + self.delta_time = 0.1 + self.timer = self.create_timer(self.delta_time, self.on_timer) + print("Start timer callback") + + def on_timer(self): + timestamp = self.get_clock().now().to_msg() + + self.kill_online_perception_node() + + if self.args.detected_object: + pointcloud_msg = create_empty_pointcloud(timestamp) + self.pointcloud_pub.publish(pointcloud_msg) + + # step timestamp + if not self.is_pause: + self.bag_timestamp += self.rate * self.delta_time * 1e9 # seconds to timestamp + + # extract message by the timestamp + msgs = copy.deepcopy(self.find_topics_by_timestamp(self.bag_timestamp)) + objects_msg = msgs[0] + traffic_signals_msg = msgs[1] + + # objects + if objects_msg: + objects_msg.header.stamp = timestamp + if self.args.detected_object: + if not self.ego_pose: + print("No ego pose found.") + return + + ego_odom = self.find_ego_odom_by_timestamp(self.bag_timestamp) + log_ego_pose = ego_odom[1].pose.pose + + translate_objects_coordinate(self.ego_pose, log_ego_pose, objects_msg) + self.objects_pub.publish(objects_msg) + + # traffic signals + if traffic_signals_msg: + traffic_signals_msg.header.stamp = timestamp + 
self.traffic_signals_pub.publish(traffic_signals_msg) + self.prev_traffic_signals_msg = traffic_signals_msg + elif self.prev_traffic_signals_msg: + self.prev_traffic_signals_msg.header.stamp = timestamp + self.traffic_signals_pub.publish(self.prev_traffic_signals_msg) + + def onPushed(self, event): + if self.widget.button.isChecked(): + self.is_pause = True + else: + self.is_pause = False + + def onSetRate(self, button): + self.rate = float(button.text()) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("-b", "--bag", help="rosbag", default=None) + parser.add_argument( + "-d", "--detected-object", help="publish detected object", action="store_true" + ) + parser.add_argument( + "-t", "--tracked-object", help="publish tracked object", action="store_true" + ) + args = parser.parse_args() + + app = QApplication(sys.argv) + + rclpy.init() + node = PerceptionReplayer(args) + + try: + while True: + app.processEvents() + rclpy.spin_once(node, timeout_sec=0.01) + except KeyboardInterrupt: + pass + finally: + node.destroy_node() + rclpy.shutdown() diff --git a/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py b/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py index 07c153cb3f614..a86a9ae9b2bb0 100755 --- a/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py +++ b/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py @@ -23,6 +23,7 @@ from autoware_auto_perception_msgs.msg import PredictedObjects from autoware_auto_perception_msgs.msg import TrackedObjects from autoware_auto_perception_msgs.msg import TrafficSignalArray +from nav_msgs.msg import Odometry import psutil from rclpy.node import Node from rclpy.serialization import deserialize_message @@ -37,6 +38,16 @@ def __init__(self, args, name): super().__init__(name) self.args = args + self.ego_pose = None + self.rosbag_objects_data = [] 
+ self.rosbag_ego_odom_data = [] + self.rosbag_traffic_signals_data = [] + + # subscriber + self.sub_odom = self.create_subscription( + Odometry, "/localization/kinematic_state", self.on_odom, 1 + ) + # publisher if self.args.detected_object: self.objects_pub = self.create_publisher( @@ -58,10 +69,6 @@ def __init__(self, args, name): TrafficSignalArray, "/perception/traffic_light_recognition/traffic_signals", 1 ) - self.rosbag_objects_data = [] - self.rosbag_ego_odom_data = [] - self.rosbag_traffic_signals_data = [] - # load rosbag print("Stared loading rosbag") if os.path.isdir(args.bag): @@ -74,6 +81,9 @@ def __init__(self, args, name): # wait for ready to publish/subscribe time.sleep(1.0) + def on_odom(self, odom): + self.ego_pose = odom.pose.pose + def load_rosbag(self, rosbag2_path: str): reader = open_reader(str(rosbag2_path)) @@ -120,3 +130,27 @@ def kill_online_perception_node(self): process.terminate() except CalledProcessError: pass + + def find_topics_by_timestamp(self, timestamp): + objects_data = None + for data in self.rosbag_objects_data: + if timestamp < data[0]: + objects_data = data[1] + break + + traffic_signals_data = None + for data in self.rosbag_traffic_signals_data: + if timestamp < data[0]: + traffic_signals_data = data[1] + break + + return objects_data, traffic_signals_data + + def find_ego_odom_by_timestamp(self, timestamp): + ego_odom_data = None + for data in self.rosbag_ego_odom_data: + if timestamp < data[0]: + ego_odom_data = data[1] + break + + return ego_odom_data diff --git a/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py b/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py index 216f095472aa5..3b9242bbc62b7 100644 --- a/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py +++ b/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py @@ -16,38 +16,25 @@ import argparse import copy -import math -from 
nav_msgs.msg import Odometry -import numpy as np from perception_replayer_common import PerceptionReplayerCommon import rclpy from utils import calc_squared_distance from utils import create_empty_pointcloud -from utils import get_quaternion_from_yaw -from utils import get_yaw_from_quaternion +from utils import translate_objects_coordinate class PerceptionReproducer(PerceptionReplayerCommon): def __init__(self, args): super().__init__(args, "perception_reproducer") - # additional subscriber - self.sub_odom = self.create_subscription( - Odometry, "/localization/kinematic_state", self.on_odom, 1 - ) - self.ego_pose_idx = None - self.ego_pose = None self.prev_traffic_signals_msg = None # start timer callback self.timer = self.create_timer(0.01, self.on_timer) print("Start timer callback") - def on_odom(self, odom): - self.ego_pose = odom.pose.pose - def on_timer(self): timestamp = self.get_clock().now().to_msg() @@ -57,68 +44,35 @@ def on_timer(self): pointcloud_msg = create_empty_pointcloud(timestamp) self.pointcloud_pub.publish(pointcloud_msg) - if self.ego_pose: - ego_odom = self.find_nearest_ego_odom_by_observation(self.ego_pose) - pose_timestamp = ego_odom[0] - log_ego_pose = ego_odom[1].pose.pose - - msgs = copy.deepcopy(self.find_topics_by_timestamp(pose_timestamp)) - objects_msg = msgs[0] - traffic_signals_msg = msgs[1] - if objects_msg: - objects_msg.header.stamp = timestamp - if self.args.detected_object: - log_ego_yaw = get_yaw_from_quaternion(log_ego_pose.orientation) - log_ego_pose_trans_mat = np.array( - [ - [ - math.cos(log_ego_yaw), - -math.sin(log_ego_yaw), - log_ego_pose.position.x, - ], - [math.sin(log_ego_yaw), math.cos(log_ego_yaw), log_ego_pose.position.y], - [0.0, 0.0, 1.0], - ] - ) - - ego_yaw = get_yaw_from_quaternion(self.ego_pose.orientation) - ego_pose_trans_mat = np.array( - [ - [math.cos(ego_yaw), -math.sin(ego_yaw), self.ego_pose.position.x], - [math.sin(ego_yaw), math.cos(ego_yaw), self.ego_pose.position.y], - [0.0, 0.0, 1.0], - ] - ) - 
- for o in objects_msg.objects: - log_object_pose = o.kinematics.pose_with_covariance.pose - log_object_yaw = get_yaw_from_quaternion(log_object_pose.orientation) - log_object_pos_vec = np.array( - [log_object_pose.position.x, log_object_pose.position.y, 1.0] - ) - - # translate object pose from ego pose in log to ego pose in simulation - object_pos_vec = np.linalg.inv(ego_pose_trans_mat).dot( - log_ego_pose_trans_mat.dot(log_object_pos_vec.T) - ) - - object_pose = o.kinematics.pose_with_covariance.pose - object_pose.position.x = object_pos_vec[0] - object_pose.position.y = object_pos_vec[1] - object_pose.orientation = get_quaternion_from_yaw( - log_object_yaw + log_ego_yaw - ego_yaw - ) - - self.objects_pub.publish(objects_msg) - if traffic_signals_msg: - traffic_signals_msg.header.stamp = timestamp - self.traffic_signals_pub.publish(traffic_signals_msg) - self.prev_traffic_signals_msg = traffic_signals_msg - elif self.prev_traffic_signals_msg: - self.prev_traffic_signals_msg.header.stamp = timestamp - self.traffic_signals_pub.publish(self.prev_traffic_signals_msg) - else: + if not self.ego_pose: print("No ego pose found.") + return + + # find nearest ego odom by simulation observation + ego_odom = self.find_nearest_ego_odom_by_observation(self.ego_pose) + pose_timestamp = ego_odom[0] + log_ego_pose = ego_odom[1].pose.pose + + # extract message by the nearest ego odom timestamp + msgs = copy.deepcopy(self.find_topics_by_timestamp(pose_timestamp)) + objects_msg = msgs[0] + traffic_signals_msg = msgs[1] + + # objects + if objects_msg: + objects_msg.header.stamp = timestamp + if self.args.detected_object: + translate_objects_coordinate(self.ego_pose, log_ego_pose, objects_msg) + self.objects_pub.publish(objects_msg) + + # traffic signals + if traffic_signals_msg: + traffic_signals_msg.header.stamp = timestamp + self.traffic_signals_pub.publish(traffic_signals_msg) + self.prev_traffic_signals_msg = traffic_signals_msg + elif self.prev_traffic_signals_msg: + 
self.prev_traffic_signals_msg.header.stamp = timestamp + self.traffic_signals_pub.publish(self.prev_traffic_signals_msg) def find_nearest_ego_odom_by_observation(self, ego_pose): if self.ego_pose_idx: @@ -139,21 +93,6 @@ def find_nearest_ego_odom_by_observation(self, ego_pose): return self.rosbag_ego_odom_data[nearest_idx] - def find_topics_by_timestamp(self, timestamp): - objects_data = None - for data in self.rosbag_objects_data: - if timestamp < data[0]: - objects_data = data[1] - break - - traffic_signals_data = None - for data in self.rosbag_traffic_signals_data: - if timestamp < data[0]: - traffic_signals_data = data[1] - break - - return objects_data, traffic_signals_data - if __name__ == "__main__": parser = argparse.ArgumentParser() diff --git a/planning/planning_debug_tools/scripts/perception_replayer/time_manager_widget.py b/planning/planning_debug_tools/scripts/perception_replayer/time_manager_widget.py new file mode 100644 index 0000000000000..aad803f8ffbce --- /dev/null +++ b/planning/planning_debug_tools/scripts/perception_replayer/time_manager_widget.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python3 + +# Copyright 2023 TIER IV, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from PyQt5 import QtCore +from PyQt5.QtWidgets import QGridLayout +from PyQt5.QtWidgets import QMainWindow +from PyQt5.QtWidgets import QPushButton +from PyQt5.QtWidgets import QSizePolicy +from PyQt5.QtWidgets import QWidget + + +class TimeManagerWidget(QMainWindow): + def __init__(self): + super(self.__class__, self).__init__() + self.setupUI() + + def setupUI(self): + self.setObjectName("MainWindow") + self.resize(480, 120) + self.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint) + + self.centralwidget = QWidget(self) + self.centralwidget.setObjectName("centralwidget") + + self.gridLayout = QGridLayout(self.centralwidget) + self.gridLayout.setContentsMargins(10, 10, 10, 10) + self.gridLayout.setObjectName("gridLayout") + + self.rate_button = [] + for i, rate in enumerate([0.1, 0.5, 1.0, 2.0, 5.0, 10.0]): + btn = QPushButton(str(rate)) + btn.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding) + self.rate_button.append(btn) + self.gridLayout.addWidget(self.rate_button[-1], 0, i, 1, 1) + + self.button = QPushButton("pause") + self.button.setCheckable(True) + self.button.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding) + + self.gridLayout.addWidget(self.button, 1, 0, 1, -1) + self.setCentralWidget(self.centralwidget) + + """ + def onSetRate(self, button): + print(button.text(), button) + """ diff --git a/planning/planning_debug_tools/scripts/perception_replayer/utils.py b/planning/planning_debug_tools/scripts/perception_replayer/utils.py index 3e954fb0cd293..572922d4c7abb 100644 --- a/planning/planning_debug_tools/scripts/perception_replayer/utils.py +++ b/planning/planning_debug_tools/scripts/perception_replayer/utils.py @@ -17,6 +17,7 @@ import math from geometry_msgs.msg import Quaternion +import numpy as np import rosbag2_py from sensor_msgs.msg import PointCloud2 from sensor_msgs.msg import PointField @@ -83,3 +84,42 @@ def get_quaternion_from_yaw(yaw): orientation.z = q[2] orientation.w = q[3] return orientation + + +def 
translate_objects_coordinate(ego_pose, log_ego_pose, objects_msg): + log_ego_yaw = get_yaw_from_quaternion(log_ego_pose.orientation) + log_ego_pose_trans_mat = np.array( + [ + [ + math.cos(log_ego_yaw), + -math.sin(log_ego_yaw), + log_ego_pose.position.x, + ], + [math.sin(log_ego_yaw), math.cos(log_ego_yaw), log_ego_pose.position.y], + [0.0, 0.0, 1.0], + ] + ) + + ego_yaw = get_yaw_from_quaternion(ego_pose.orientation) + ego_pose_trans_mat = np.array( + [ + [math.cos(ego_yaw), -math.sin(ego_yaw), ego_pose.position.x], + [math.sin(ego_yaw), math.cos(ego_yaw), ego_pose.position.y], + [0.0, 0.0, 1.0], + ] + ) + + for o in objects_msg.objects: + log_object_pose = o.kinematics.pose_with_covariance.pose + log_object_yaw = get_yaw_from_quaternion(log_object_pose.orientation) + log_object_pos_vec = np.array([log_object_pose.position.x, log_object_pose.position.y, 1.0]) + + # translate object pose from ego pose in log to ego pose in simulation + object_pos_vec = np.linalg.inv(ego_pose_trans_mat).dot( + log_ego_pose_trans_mat.dot(log_object_pos_vec.T) + ) + + object_pose = o.kinematics.pose_with_covariance.pose + object_pose.position.x = object_pos_vec[0] + object_pose.position.y = object_pos_vec[1] + object_pose.orientation = get_quaternion_from_yaw(log_object_yaw + log_ego_yaw - ego_yaw) From a5ae764335b184da2ebb851b046e844c567af283 Mon Sep 17 00:00:00 2001 From: Takayuki Murooka Date: Wed, 9 Aug 2023 04:02:35 +0900 Subject: [PATCH 3/4] change permission Signed-off-by: Takayuki Murooka --- .../scripts/perception_replayer/perception_replayer.py | 0 .../scripts/perception_replayer/perception_replayer_common.py | 0 .../scripts/perception_replayer/perception_reproducer.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 planning/planning_debug_tools/scripts/perception_replayer/perception_replayer.py mode change 100755 => 100644 planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py mode change 100644 => 100755 
planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py diff --git a/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer.py b/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer.py old mode 100644 new mode 100755 diff --git a/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py b/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py old mode 100755 new mode 100644 diff --git a/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py b/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py old mode 100644 new mode 100755 From 1d431080104136d52b6ca5f31edb1a4ffe68cb4b Mon Sep 17 00:00:00 2001 From: Takayuki Murooka Date: Wed, 9 Aug 2023 04:23:31 +0900 Subject: [PATCH 4/4] update Signed-off-by: Takayuki Murooka --- planning/planning_debug_tools/CMakeLists.txt | 3 +- planning/planning_debug_tools/Readme.md | 32 ++++++++++++++++++- .../perception_reproducer.py | 3 -- .../time_manager_widget.py | 29 +++++++---------- 4 files changed, 45 insertions(+), 22 deletions(-) diff --git a/planning/planning_debug_tools/CMakeLists.txt b/planning/planning_debug_tools/CMakeLists.txt index c2bee10f972f1..bf7e36c4c0c0a 100644 --- a/planning/planning_debug_tools/CMakeLists.txt +++ b/planning/planning_debug_tools/CMakeLists.txt @@ -50,6 +50,7 @@ ament_auto_package( install(PROGRAMS scripts/trajectory_visualizer.py scripts/closest_velocity_checker.py - scripts/perception_reproducer.py + scripts/perception_replayer/perception_reproducer.py + scripts/perception_replayer/perception_replayer.py DESTINATION lib/${PROJECT_NAME} ) diff --git a/planning/planning_debug_tools/Readme.md b/planning/planning_debug_tools/Readme.md index 6d4958783f86d..fe9615614de81 100644 --- a/planning/planning_debug_tools/Readme.md +++ b/planning/planning_debug_tools/Readme.md @@ -188,7 +188,7 @@ 
PlotCurrentVelocity('localization_kinematic_state', '/localization/kinematic_sta
 
 ## Perception reproducer
 
-This script can overlay the perception results from the rosbag on the planning simulator.
+This script can overlay the perception results from the rosbag on the planning simulator synchronized with the simulator's ego pose.
 
 In detail, the ego pose in the rosbag which is closest to the current ego pose in the simulator is calculated.
 The perception results at the timestamp of the closest ego pose is extracted, and published.
@@ -211,3 +211,33 @@ ros2 run planning_debug_tools perception_reproducer.py -b <bag-file>
 ```
 
 Instead of publishing predicted objects, you can publish detected/tracked objects by designating `-d` or `-t`, respectively.
+
+## Perception replayer
+
+A part of the feature is under development.
+
+This script can overlay the perception results from the rosbag on the planning simulator.
+
+In detail, this script publishes the data at a certain timestamp from the rosbag.
+Without any operation, the timestamp increases in real time.
+By using the GUI, you can modify the timestamp by pausing, changing the rate, or going back into the past.
+
+### How to use
+
+First, launch the planning simulator, and set the ego pose.
+Then, run the script according to the following command.
+
+By designating a rosbag, the perception replayer can be launched.
+The GUI is also launched, with which the timestamp of the rosbag can be managed.
+
+```bash
+ros2 run planning_debug_tools perception_replayer.py -b <bag-file>
+```
+
+You can designate multiple rosbags in the directory.
+
+```bash
+ros2 run planning_debug_tools perception_replayer.py -b <directory-of-bag-files>
+```
+
+Instead of publishing predicted objects, you can publish detected/tracked objects by designating `-d` or `-t`, respectively.
diff --git a/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py b/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py index 3b9242bbc62b7..36b62bc1ec5ee 100755 --- a/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py +++ b/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py @@ -107,11 +107,8 @@ def find_nearest_ego_odom_by_observation(self, ego_pose): rclpy.init() node = PerceptionReproducer(args) - rclpy.spin(node) try: - rclpy.init() - node = PerceptionReproducer(args) rclpy.spin(node) except KeyboardInterrupt: pass diff --git a/planning/planning_debug_tools/scripts/perception_replayer/time_manager_widget.py b/planning/planning_debug_tools/scripts/perception_replayer/time_manager_widget.py index aad803f8ffbce..77bf65a17b848 100644 --- a/planning/planning_debug_tools/scripts/perception_replayer/time_manager_widget.py +++ b/planning/planning_debug_tools/scripts/perception_replayer/time_manager_widget.py @@ -28,32 +28,27 @@ def __init__(self): self.setupUI() def setupUI(self): - self.setObjectName("MainWindow") + self.setObjectName("PerceptionReplayer") self.resize(480, 120) self.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint) - self.centralwidget = QWidget(self) - self.centralwidget.setObjectName("centralwidget") + self.central_widget = QWidget(self) + self.central_widget.setObjectName("central_widget") - self.gridLayout = QGridLayout(self.centralwidget) - self.gridLayout.setContentsMargins(10, 10, 10, 10) - self.gridLayout.setObjectName("gridLayout") + self.grid_layout = QGridLayout(self.central_widget) + self.grid_layout.setContentsMargins(10, 10, 10, 10) + self.grid_layout.setObjectName("grid_layout") self.rate_button = [] for i, rate in enumerate([0.1, 0.5, 1.0, 2.0, 5.0, 10.0]): - btn = QPushButton(str(rate)) - btn.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding) - self.rate_button.append(btn) - 
self.gridLayout.addWidget(self.rate_button[-1], 0, i, 1, 1) + button = QPushButton(str(rate)) + button.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding) + self.rate_button.append(button) + self.grid_layout.addWidget(self.rate_button[-1], 0, i, 1, 1) self.button = QPushButton("pause") self.button.setCheckable(True) self.button.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding) - self.gridLayout.addWidget(self.button, 1, 0, 1, -1) - self.setCentralWidget(self.centralwidget) - - """ - def onSetRate(self, button): - print(button.text(), button) - """ + self.grid_layout.addWidget(self.button, 1, 0, 1, -1) + self.setCentralWidget(self.central_widget)