
ROS nodes FPS performance measurements #419

Merged
80 commits merged on Apr 11, 2023

Changes from 1 commit

Commits (80)
f199d38
Added performance node
tsampazk Mar 13, 2023
a1d88ad
Added time measurement for tool inference and publishing
tsampazk Mar 13, 2023
733bd91
Moved performance start time before image preprocessing
tsampazk Mar 21, 2023
ad08216
audiovisual_emotion_recognition_node.py performance and some formatting
tsampazk Mar 22, 2023
69f6b55
binary_high_resolution_node.py performance
tsampazk Mar 22, 2023
be8a07a
continual_skeleton_based_action_recognition_node.py performance and s…
tsampazk Mar 22, 2023
e855f2e
face_detection_retinaface_node.py performance
tsampazk Mar 22, 2023
e476392
face_recognition_node.py performance
tsampazk Mar 22, 2023
5238abe
facial_emotion_estimation_node.py performance
tsampazk Mar 22, 2023
50ee6e4
fall_detection_node.py performance
tsampazk Mar 22, 2023
481aade
heart_anomaly_detection_node.py performance
tsampazk Mar 22, 2023
472e3f8
hr_pose_estimation_node.py performance
tsampazk Mar 22, 2023
1ccbe63
landmark_based_facial_expression_recognition_node.py performance
tsampazk Mar 22, 2023
09d480f
object_detection_2d_centernet_node.py performance
tsampazk Mar 22, 2023
c38f7ee
object_detection_2d_detr_node.py performance
tsampazk Mar 22, 2023
a5a50d9
object_detection_2d_gem_node.py performance
tsampazk Mar 22, 2023
65e34d8
object_detection_2d_nanodet_node.py performance
tsampazk Mar 22, 2023
3af0f63
object_detection_2d_ssd_node.py performance
tsampazk Mar 22, 2023
e8ad377
object_detection_2d_yolov3_node.py performance
tsampazk Mar 22, 2023
c6a66ce
object_detection_2d_yolov5_node.py performance
tsampazk Mar 22, 2023
8611525
object_detection_3d_voxel_node.py performance
tsampazk Mar 22, 2023
613fdc5
object_tracking_2d_deep_sort_node.py performance
tsampazk Mar 22, 2023
15d889a
object_tracking_2d_fair_mot_node.py performance
tsampazk Mar 22, 2023
2f3dfee
object_tracking_2d_siamrpn_node.py performance
tsampazk Mar 22, 2023
d5a52f9
object_tracking_3d_ab3dmot_node.py performance
tsampazk Mar 22, 2023
b62a867
panoptic_segmentation_efficient_lps_node.py performance
tsampazk Mar 22, 2023
4cee604
panoptic_segmentation_efficient_ps_node.py performance
tsampazk Mar 22, 2023
b3b616f
pose_estimation_node.py minor fixes
tsampazk Mar 22, 2023
576b896
rgbd_hand_gesture_recognition_node.py performance
tsampazk Mar 22, 2023
8bcc78b
semantic_segmentation_bisenet_node.py performance
tsampazk Mar 22, 2023
5cad7be
skeleton_based_action_recognition_node.py performance
tsampazk Mar 22, 2023
6340548
speech_command_recognition_node.py performance
tsampazk Mar 22, 2023
8bd1ccc
video_activity_recognition_node.py performance
tsampazk Mar 22, 2023
fafa9b3
Added section for utility nodes and entry for the performance node
tsampazk Mar 23, 2023
2614e17
Added entry in notes for logging performance
tsampazk Mar 23, 2023
11e4b5b
Added entries for the new performance topic in all nodes
tsampazk Mar 23, 2023
1836478
audiovisual_emotion_recognition_node.py ROS2 performance
tsampazk Mar 24, 2023
dd3f416
binary_high_resolution_node.py ROS2 performance
tsampazk Mar 24, 2023
c434ba2
continual_skeleton_based_action_recognition_node.py ROS2 performance
tsampazk Mar 24, 2023
6da8dd0
face_detection_retinaface_node.py ROS2 performance
tsampazk Mar 24, 2023
8e22ab5
face_recognition_node.py ROS2 performance
tsampazk Mar 24, 2023
1ada491
facial_emotion_estimation_node.py ROS2 performance
tsampazk Mar 24, 2023
ad12e95
fall_detection_node.py ROS2 performance
tsampazk Mar 24, 2023
7d40dcb
heart_anomaly_detection_node.py ROS2 performance
tsampazk Mar 24, 2023
b10db4c
hr_pose_estimation_node.py ros1 renamed class
tsampazk Mar 24, 2023
691b1f9
hr_pose_estimation_node.py ROS2 performance
tsampazk Mar 24, 2023
2b697c3
landmark_based_facial_expression_recognition_node.py ROS2 performance
tsampazk Mar 24, 2023
0085265
object_detection_2d_centernet_node.py ROS2 performance
tsampazk Mar 24, 2023
62217b7
object_detection_2d_detr_node.py ROS2 performance
tsampazk Mar 24, 2023
fed1430
object_detection_2d_gem_node.py ROS2 performance
tsampazk Mar 24, 2023
c1f684f
object_detection_2d_nanodet_node.py ROS1 minor fix
tsampazk Mar 24, 2023
5826087
object_detection_2d_nanodet_node.py ROS2 performance
tsampazk Mar 24, 2023
1a83b22
object_detection_2d_ssd_node.py ROS2 performance
tsampazk Mar 24, 2023
94c80be
object_detection_2d_yolov3_node.py ROS1 class name fix
tsampazk Mar 24, 2023
2248fbf
object_detection_2d_yolov3_node.py ROS2 performance
tsampazk Mar 24, 2023
3cd9839
object_detection_2d_yolov5_node.py ROS1 class name fix
tsampazk Mar 24, 2023
f63144c
object_detection_2d_yolov5_node.py ROS2 performance
tsampazk Mar 24, 2023
c709714
object_detection_3d_voxel_node.py ROS2 performance
tsampazk Mar 24, 2023
be5d0c2
object_tracking_2d_deep_sort_node.py ROS2 performance
tsampazk Mar 24, 2023
c1be58d
object_tracking_2d_fair_mot_node.py ROS2 performance
tsampazk Mar 24, 2023
75f0faa
object_tracking_2d_siamrpn_node.py ROS2 performance
tsampazk Mar 24, 2023
cba574c
object_tracking_3d_ab3dmot_node.py ROS2 performance
tsampazk Mar 24, 2023
e6dd984
panoptic_segmentation_efficient_lps_node.py ROS1 some imports rearran…
tsampazk Mar 24, 2023
c8a9e48
panoptic_segmentation_efficient_lps_node.py ROS2 performance
tsampazk Mar 24, 2023
068eff8
panoptic_segmentation_efficient_ps_node.py ROS1 some imports rearrang…
tsampazk Mar 24, 2023
b9d8168
panoptic_segmentation_efficient_ps_node.py ROS2 performance
tsampazk Mar 24, 2023
08d24ab
pose_estimation_node.py ROS2 performance
tsampazk Mar 24, 2023
c84d9e8
rgbd_hand_gesture_recognition_node.py ROS2 performance
tsampazk Mar 24, 2023
1e99fd2
semantic_segmentation_bisenet_node.py ROS2 performance
tsampazk Mar 24, 2023
bb5f274
skeleton_based_action_recognition_node.py ROS2 performance and some r…
tsampazk Mar 24, 2023
f68523d
speech_command_recognition_node.py ROS2 performance
tsampazk Mar 24, 2023
afa47a0
video_activity_recognition_node.py ROS2 performance
tsampazk Mar 24, 2023
e0fb25b
Added ROS2 performance_node.py
tsampazk Mar 24, 2023
5d13583
ROS2 readme updates for performance topics/node
tsampazk Mar 24, 2023
4b47c30
Apply suggestions from code review
tsampazk Mar 27, 2023
75994ee
Disable running tracking infer if no tracking is needed for ab3dmot ROS2
tsampazk Mar 27, 2023
5182ad0
Disable running tracking infer if no tracking is needed for ab3dmot ROS1
tsampazk Mar 27, 2023
3a30c7d
Apply suggestions from code review
tsampazk Apr 5, 2023
4c1701a
Removed additional spaces after suggested change
tsampazk Apr 5, 2023
ff6e1d6
Applied review suggestion to ROS2 node as well
tsampazk Apr 5, 2023
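
All of the ROS1 and ROS2 nodes listed above follow the same measurement pattern: when a performance topic is given, a timer is started just before input preprocessing and stopped right after tool inference, and the resulting frames-per-second value is published as a std_msgs Float32 message. The snippet below is only a simplified sketch of that pattern (the node name, topic name and callback are placeholders, not code taken verbatim from the PR); the diff that follows shows the actual change applied to rgbd_hand_gesture_recognition_node.py.

from time import perf_counter

import rospy
from std_msgs.msg import Float32

# Sketch only: in the real nodes the publisher is created in __init__ and
# only when a performance topic is passed, otherwise it is set to None.
rospy.init_node("fps_measurement_example")
performance_publisher = rospy.Publisher("/opendr/performance", Float32, queue_size=1)

def callback(msg):
    start_time = perf_counter()  # started before preprocessing
    # ... preprocess the input and run tool inference here ...
    end_time = perf_counter()    # stopped right after inference
    fps_msg = Float32()
    fps_msg.data = 1.0 / (end_time - start_time)
    performance_publisher.publish(fps_msg)
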
rgbd_hand_gesture_recognition_node.py performance
tsampazk committed Mar 22, 2023
commit 576b89698bd10c10ddf2325ca80bdf997c4ec3e7
@@ -19,9 +19,11 @@
 import cv2
 import numpy as np
 import torch
+from time import perf_counter
 
 import rospy
 import message_filters
+from std_msgs.msg import Float32
 from sensor_msgs.msg import Image as ROS_Image
 from vision_msgs.msg import Classification2D
 
@@ -34,7 +36,8 @@ class RgbdHandGestureNode:
 
     def __init__(self, input_rgb_image_topic="/kinect2/qhd/image_color_rect",
                  input_depth_image_topic="/kinect2/qhd/image_depth_rect",
-                 output_gestures_topic="/opendr/gestures", device="cuda", delay=0.1):
+                 output_gestures_topic="/opendr/gestures",
+                 performance_topic=None, device="cuda", delay=0.1):
         """
         Creates a ROS Node for gesture recognition from RGBD. Assuming that the following drivers have been installed:
         https://github.com/OpenKinect/libfreenect2 and https://github.com/code-iai/iai_kinect2.
@@ -44,6 +47,9 @@ def __init__(self, input_rgb_image_topic="/kinect2/qhd/image_color_rect",
         :type input_depth_image_topic: str
         :param output_gestures_topic: Topic to which we are publishing the predicted gesture class
         :type output_gestures_topic: str
+        :param performance_topic: Topic to which we are publishing performance information (if None, no performance
+        message is published)
+        :type performance_topic: str
         :param device: device on which we are running inference ('cpu' or 'cuda')
         :type device: str
         :param delay: Define the delay (in seconds) with which rgb message and depth message can be synchronized
@@ -56,6 +62,11 @@ def __init__(self, input_rgb_image_topic="/kinect2/qhd/image_color_rect",
 
         self.gesture_publisher = rospy.Publisher(output_gestures_topic, Classification2D, queue_size=10)
 
+        if performance_topic is not None:
+            self.performance_publisher = rospy.Publisher(performance_topic, Float32, queue_size=1)
+        else:
+            self.performance_publisher = None
+
         self.bridge = ROSBridge()
 
         # Initialize the gesture recognition
@@ -93,7 +104,8 @@ def callback(self, rgb_data, depth_data):
         :param depth_data: input depth image message
         :type depth_data: sensor_msgs.msg.Image
         """
-
+        if self.performance_publisher:
+            start_time = perf_counter()
         # Convert sensor_msgs.msg.Image into OpenDR Image and preprocess
         rgb_image = self.bridge.from_ros_image(rgb_data, encoding='bgr8')
         depth_data.encoding = 'mono16'
@@ -103,6 +115,13 @@
         # Run gesture recognition
        gesture_class = self.gesture_learner.infer(img)
 
+        if self.performance_publisher:
+            end_time = perf_counter()
+            fps = 1.0 / (end_time - start_time)  # NOQA
+            fps_msg = Float32()
+            fps_msg.data = fps
+            self.performance_publisher.publish(fps_msg)
+
         # Publish results
         ros_gesture = self.bridge.from_category_to_rosclass(gesture_class)
         self.gesture_publisher.publish(ros_gesture)
@@ -115,8 +134,8 @@ def preprocess(self, rgb_image, depth_image):
         :param depth_image: input depth image
         :type depth_image: engine.data.Image
         """
-        rgb_image = rgb_image.convert(format='channels_last') / (2**8 - 1)
-        depth_image = depth_image.convert(format='channels_last') / (2**16 - 1)
+        rgb_image = rgb_image.convert(format='channels_last') / (2 ** 8 - 1)
+        depth_image = depth_image.convert(format='channels_last') / (2 ** 16 - 1)
 
         # resize the images to 224x224
         rgb_image = cv2.resize(rgb_image, (224, 224))
@@ -139,10 +158,12 @@ def preprocess(self, rgb_image, depth_image):
                         type=str, default="/kinect2/qhd/image_depth_rect")
     parser.add_argument("-o", "--output_gestures_topic", help="Topic name for predicted gesture class",
                         type=str, default="/opendr/gestures")
+    parser.add_argument("--performance_topic", help="Topic name for performance messages, disabled (None) by default",
+                        type=str, default=None)
    parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda",
                        choices=["cuda", "cpu"])
     parser.add_argument("--delay", help="The delay (in seconds) with which RGB message and"
-                        "depth message can be synchronized", type=float, default=0.1)
+                                         "depth message can be synchronized", type=float, default=0.1)
 
     args = parser.parse_args()
 
@@ -161,7 +182,8 @@ def preprocess(self, rgb_image, depth_image):
 
     gesture_node = RgbdHandGestureNode(input_rgb_image_topic=args.input_rgb_image_topic,
                                        input_depth_image_topic=args.input_depth_image_topic,
-                                       output_gestures_topic=args.output_gestures_topic, device=device,
-                                       delay=args.delay)
+                                       output_gestures_topic=args.output_gestures_topic,
+                                       performance_topic=args.performance_topic,
+                                       device=device, delay=args.delay)
 
     gesture_node.listen()
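
Once a node is started with --performance_topic set (for example to /opendr/performance), each processed frame results in one Float32 message on that topic, which can be inspected with rostopic echo or aggregated by a small subscriber. The snippet below is a hypothetical minimal consumer for illustration only; the PR itself adds a dedicated performance_node.py utility for this purpose, whose actual implementation may differ.

import rospy
from std_msgs.msg import Float32

fps_values = []

def on_fps(msg):
    # Keep a running average of the reported FPS values and log it.
    fps_values.append(msg.data)
    rospy.loginfo("current FPS: %.2f, average FPS: %.2f",
                  msg.data, sum(fps_values) / len(fps_values))

rospy.init_node("fps_listener", anonymous=True)
rospy.Subscriber("/opendr/performance", Float32, on_fps)
rospy.spin()
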