diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md new file mode 100644 index 0000000000..a45a35bedb --- /dev/null +++ b/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,26 @@ +* Before opening a new issue, we wanted to provide you with some useful suggestions (Click "Preview" above for a better view): + + * Consider checking our ROS RealSense Wrapper documentation [README](https://github.com/IntelRealSense/realsense-ros/blob/ros2-master/README.md). + * Have you looked in our [Discussions](https://github.com/IntelRealSense/realsense-ros/discussions)? + * Try [searching our GitHub Issues](https://github.com/IntelRealSense/realsense-ros/issues) (open and closed) for a similar issue. + +* All users are welcomed to report bugs, ask questions, suggest or request enhancements and generally feel free to open new issue, even if they haven't followed any of the suggestions above :) + +---------------------------------------------------------------------------------------------------- + +| Required Info | | +|---------------------------------|------------------------------------------------------------ | +| Camera Model | { D405 / D415 / D435 / D435i / D455 / D457 } | +| Firmware Version | (Open RealSense Viewer --> Click info) | +| Operating System & Version | { Win (10/11) / Linux (Ubuntu 18/20/22) / MacOS } | +| Kernel Version (Linux Only) | (e.g. 5.4) | +| Platform | PC/Raspberry Pi/ NVIDIA Jetson / etc.. | +| Librealsense SDK Version | { 2.. } | +| Language | {C/C#/labview/opencv/pcl/python/unity } | +| Segment | {Robot/Smartphone/VR/AR/others } | +| ROS Distro | {Iron/Humble/Jazzy/Rolling/etc.. } | +| RealSense ROS Wrapper Version | {4.51.1, 4.54.1, etc..} | + + +### Issue Description + diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ecd144d630..bae808d38e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,13 +6,17 @@ on: push: branches: - ros2-development + - ros2-master pull_request: branches: - ros2-development + - ros2-master # Allows you to run this workflow manually from the Actions tab workflow_dispatch: +permissions: read-all + # A workflow run is made up of one or more jobs that can run sequentially or in parallel # This workflow contains a single job called "build" @@ -23,41 +27,64 @@ jobs: strategy: fail-fast: false matrix: - ros_distro: [rolling, iron, humble] + ros_distro: [rolling, iron, humble, foxy, jazzy] include: - ros_distro: 'rolling' - os: ubuntu-22.04 + os: ubuntu-24.04 + - ros_distro: 'jazzy' + os: ubuntu-24.04 - ros_distro: 'iron' os: ubuntu-22.04 - ros_distro: 'humble' os: ubuntu-22.04 + - ros_distro: 'foxy' + os: ubuntu-20.04 steps: - name: Setup ROS2 Workspace - run: | + run: | mkdir -p ${{github.workspace}}/ros2/src - - uses: actions/checkout@v2 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4 with: path: 'ros2/src/realsense-ros' - + - name: Check Copyright & Line-Endings shell: bash run: | cd ${{github.workspace}}/ros2/src/realsense-ros/scripts ./pr_check.sh - - name: build ROS2 - uses: ros-tooling/setup-ros@v0.7 + # setup-ros@v0.6 is the last version supporting foxy (EOL) + # setup-ros@v0.7 is needed to support humble/iron/rolling/jazzy + # so, seperating steps with if conditions + - name: build ROS2 for foxy + if: ${{ matrix.ros_distro == 'foxy' }} + uses: ros-tooling/setup-ros@236ab287884fd5a314fc030e91b2017abb46719e #v0.6 with: required-ros-distributions: ${{ matrix.ros_distro }} - - - name: Build RealSense SDK 2.0 from source + - name: build ROS2 for humble/iron/rolling/jazzy + 
if: ${{ matrix.ros_distro != 'foxy' }} + uses: ros-tooling/setup-ros@a6ce30ecca1e5dcc10ae5e6a44fe2169115bf852 #v0.7 + with: + required-ros-distributions: ${{ matrix.ros_distro }} + + - name: Checkout librealsense/development + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4 + with: + repository: IntelRealSense/librealsense + path: librealsense + ref: development + + - name: Build RealSense SDK 2.0 (development branch) from source run: | - cd ${{github.workspace}} - git clone https://github.com/IntelRealSense/librealsense.git -b development - cd librealsense + + # libusb-1.0-0-dev is needed for librealsense build in ubuntu 20.04 + # This apt install command will be ignored in ubuntu 22.04 as libusb-1.0-0-dev already installed there + sudo apt install -y libusb-1.0-0-dev + + cd ${{github.workspace}}/librealsense sudo mkdir build cd build sudo cmake ../ -DCMAKE_BUILD_TYPE=Release -DBUILD_EXAMPLES=false -DBUILD_GRAPHICAL_EXAMPLES=false @@ -66,17 +93,20 @@ jobs: sudo make -j10 sudo make install - - name: Build RealSense ROS2 Wrapper - run: | + - name: Build RealSense ROS2 Wrapper from source + run: | echo "source /opt/ros/${{ matrix.ros_distro }}/setup.bash" >> ${{github.workspace}}/.bashrc source ${{github.workspace}}/.bashrc cd ${{github.workspace}}/ros2 echo "================= ROSDEP UPDATE =====================" - rosdep update --rosdistro ${{ matrix.ros_distro }} + rosdep update --rosdistro ${{ matrix.ros_distro }} --include-eol-distros echo "================= ROSDEP INSTALL ====================" rosdep install -i --reinstall --from-path src --rosdistro ${{ matrix.ros_distro }} --skip-keys=librealsense2 -y echo "================== COLCON BUILD ======================" - colcon build --cmake-args '-DBUILD_TOOLS=ON' + # Enable 'BUILD_TOOLS' through cmake arguments. Since, this variable is available only in realsense2_camera package + # and not in realsense2_camera_msgs and realsense2_description packages, to avoid warnings from these packages, + # use '--no-warn-unused-cli'. Ref: https://cmake.org/cmake/help/v3.0/manual/cmake.1.html + colcon build --cmake-args '-DBUILD_TOOLS=ON' --no-warn-unused-cli ## This step is commented out since we don't use rosbag files in "Run Tests" step below. ## Please uncomment when "Run Tests" step is fixed to run all tests. @@ -89,22 +119,49 @@ jobs: # bag_filename="https://librealsense.intel.com/rs-tests/D435i_Depth_and_IMU_Stands_still.bag"; # wget $bag_filename -P "records/" # sudo apt install ros-${{ matrix.ros_distro}}-launch-pytest - - - name: Install Packages For Tests + + - name: Install Packages For Foxy Tests + if: ${{ matrix.ros_distro == 'foxy' }} + run: | + # To avoid mixing of 'apt' provided packages and 'pip' provided packages, one way is to create virtual env + # and manage python packages within it. Ref: https://peps.python.org/pep-0668/ + python3 -m venv .venv + # Activate the virtual env such that following python related commands run within it. + source .venv/bin/activate + sudo apt-get install python3-pip + # numpy-quaternion needs numpy<2.0.0. Chose 1.24.1 as it is the highest version that support foxy. + pip3 install --force-reinstall numpy==1.24.1 + pip3 install numpy-quaternion tqdm pyyaml + + - name: Install Packages For Humble/Iron/Rolling/Jazzy Tests + if: ${{ matrix.ros_distro != 'foxy' }} run: | + # To avoid mixing of 'apt' provided packages and 'pip' provided packages, one way is to create virtual env + # and manage python packages within it. 
Ref: https://peps.python.org/pep-0668/ + python3 -m venv .venv + # Activate the virtual env such that following python related commands run within it. + source .venv/bin/activate sudo apt-get install python3-pip - pip3 install numpy --upgrade - pip3 install numpy-quaternion tqdm - + # numpy-quaternion needs numpy<2.0.0. Chose 1.26.4 as it is the lowest working version for ubuntu 24.04. + pip3 install --force-reinstall numpy==1.26.4 + pip3 install numpy-quaternion tqdm pyyaml + - name: Run Tests run: | cd ${{github.workspace}}/ros2 source ${{github.workspace}}/.bashrc . install/local_setup.bash + # the next command might be needed for foxy distro, since this package is not installed + # by default in ubuntu 20.04. For other distro, the apt install command will be ignored. + sudo apt install -y ros-${{matrix.ros_distro}}-sensor-msgs-py + source ../.venv/bin/activate python3 src/realsense-ros/realsense2_camera/scripts/rs2_test.py non_existent_file - + + # don't run integration tests for foxy since some testing dependecies packages like + # tf_ros_py are not avaialble + # TODO: check when we can run integration tests on rolling - name: Run integration tests - if: ${{ matrix.ros_distro != 'rolling'}} + if: ${{ matrix.ros_distro != 'rolling' && matrix.ros_distro != 'foxy' }} run: | cd ${{github.workspace}}/ros2 source ${{github.workspace}}/.bashrc diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index 4b54702bcc..54c1d78d64 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -23,6 +23,7 @@ on: # Allows you to run this workflow manually from the Actions tab workflow_dispatch: +permissions: read-all jobs: build: @@ -44,6 +45,6 @@ jobs: BASEDIR: ${{ github.workspace }}/.work steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4 - name: industrial_ci - uses: ros-industrial/industrial_ci@master + uses: ros-industrial/industrial_ci@d23b9ad2c63bfad638a2b1fe3df34b8df9a2f17b #Replace reference to 'master' with the latest commit hash diff --git a/.github/workflows/static_analysis.yaml b/.github/workflows/static_analysis.yaml index 7672fd6956..b8cdeca85e 100644 --- a/.github/workflows/static_analysis.yaml +++ b/.github/workflows/static_analysis.yaml @@ -6,12 +6,14 @@ on: pull_request: branches: ['**'] +permissions: read-all + jobs: cppcheck: name: cppcheck runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4 - name: Install shell: bash diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3632ad8983..252bc4283f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -5,7 +5,7 @@ This project welcomes third-party code via GitHub pull requests. You are welcome to propose and discuss enhancements using project [issues](https://github.com/IntelRealSense/realsense-ros/issues). > **Branching Policy**: -> The `ros2-development` branch is considered stable, at all times. +> The `ros2-master` branch is considered stable, at all times. > If you plan to propose a patch, please commit into the `ros2-development` branch, or its own feature branch. In addition, please run `pr_check.sh` under `scripts` directory. This scripts verify compliance with project's standards: diff --git a/Intel Copyright b/Intel Copyright new file mode 100644 index 0000000000..7cb7ff0ca4 --- /dev/null +++ b/Intel Copyright @@ -0,0 +1,7 @@ +Copyright (2017-2024), Intel Corporation. 
+This "Software" is furnished under license and may only be used or copied in accordance with the terms of that license. +No license, express or implied, by estoppel or otherwise, to any intellectual property rights is granted by this document. +The Software is subject to change without notice, and should not be construed as a commitment by Intel Corporation to market, license, sell or support any product or technology. +Unless otherwise provided for in the license under which this Software is provided, the Software is provided AS IS, with no warranties of any kind, express or implied. +Except as expressly permitted by the Software license, neither Intel Corporation nor its suppliers assumes any responsibility or liability for any errors or inaccuracies that may appear herein. +Except as expressly permitted by the Software license, no part of the Software may be reproduced, stored in a retrieval system, transmitted in any form, or distributed by any means without the express written consent of Intel Corporation. diff --git a/NOTICE b/NOTICE deleted file mode 100644 index f986287012..0000000000 --- a/NOTICE +++ /dev/null @@ -1,4 +0,0 @@ -This project uses code from the following third-party projects, listed here -with the full text of their respective licenses. - -ddynamic_reconfigure (BSD) - https://github.com/awesomebytes/ddynamic_reconfigure diff --git a/NOTICE.md b/NOTICE.md new file mode 100644 index 0000000000..246ae907fa --- /dev/null +++ b/NOTICE.md @@ -0,0 +1,327 @@ +# **Intel® RealSense™ ROS Wrapper Third Party Programs** +## This file specifies all 3rd party SW components used for Intel® RealSense™ ROS Wrapper and the inbound license for each of these 3rd party components. + +# **Apache License 2.0 Third Party Programs** + +|Component|Home Page|License|copyright| +| :- | :- | :- | :- | +|librealsense2|

|Apache License 2.0|None|
+|Rclcpp||Apache License 2.0|None|
+|rclcpp\_components||Apache License 2.0|None|
+|builtin\_interfaces||Apache License 2.0|None|
+|cv\_bridge||Apache License 2.0|None|
+|geometry\_msgs||Apache License 2.0|None|
+|nav\_msgs||Apache License 2.0|None|
+|std\_msgs||Apache License 2.0|None|
+|sensor\_msgs||Apache License 2.0|None|
+|ament\_cmake||Apache License 2.0|None|
+|rosidl\_default\_generators||Apache License 2.0|None|
+|launch\_pytest||Apache License 2.0|None|
+|ros\_environment||Apache License 2.0|None|
+|ros2topic||Apache License 2.0|None|
+|rosidl\_default\_runtime||Apache License 2.0|None|
+|launch\_ros||Apache License 2.0|None|
+|ament\_lint\_auto||Apache License 2.0|None|
+|ament\_lint\_common||Apache License 2.0|None|
+|ament\_cmake\_gtest||Apache License 2.0|None|
+|launch\_testing||Apache License 2.0|None|
+|sensor\_msgs\_py||Apache License 2.0|None|
+|Open CV||Apache License 2.0|Copyright (C) 2000-2022, Intel Corporation, all rights reserved.<br>Copyright (C) 2009-2011, Willow Garage Inc., all rights reserved.<br>Copyright (C) 2009-2016, NVIDIA Corporation, all rights reserved.<br>Copyright (C) 2010-2013, Advanced Micro Devices, Inc., all rights reserved.<br>Copyright (C) 2015-2023, OpenCV Foundation, all rights reserved.<br>Copyright (C) 2008-2016, Itseez Inc., all rights reserved.<br>Copyright (C) 2019-2023, Xperience AI, all rights reserved.<br>Copyright (C) 2019-2022, Shenzhen Institute of Artificial Intelligence and Robotics for Society, all rights reserved.<br>Copyright (C) 2022-2023, Southern University of Science And Technology, all rights reserved.
| +## +## **Apache License 2.0** +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1\. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2\. Grant of Copyright License. 
+ +Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3\. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4\. Redistribution. + +You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and +You must cause any modified files to carry prominent notices stating that You changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. +You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5\. Submission of Contributions. 
+ +Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6\. Trademarks. + +This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7\. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8\. Limitation of Liability. + +In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9\. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +BSD 3-clause License Third Party Programs +# **BSD 3-cluase License Third Party Programs** + +|Component|Home Page|License|copyright| +| :- | :- | :- | :- | +|xacro|

|BSD 3-clause|Copyright Robert Haschke, Bielefeld University|
+|tf2\_ros\_py||BSD 3-clause|Copyright (c) 2008, Willow Garage, Inc.|
+|tf2||BSD 3-clause|Copyright (c) 2008, Willow Garage, Inc.|
+|tf2\_ros||BSD 3-clause|Copyright (c) 2008, Willow Garage, Inc.|
+|diagnostic\_updater||BSD 3-clause|Copyright 2008 - 2012, Willow Garage, Inc.<br>2012 - 2022, Open Source Robotics Foundation and contributors
| +## **BSD 3-clause** +Note: This license has also been called the “New BSD License” or “Modified BSD License”. See also the [2-clause BSD License](https://opensource.org/license/bsd-2-clause/). + +Copyright + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1\. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2\. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3\. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# **Mozilla Public License 2.0 Third Party Programs** + +|Component|Home Page|License|copyright| +| :- | :- | :- | :- | +|Eigen|

|Mozilla Public License 2.0|None| +## **Mozilla Public License 2.0** + +Mozilla Public License + +Version 2.0 + +1\. Definitions + +1\.1. “Contributor” + + means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. + +1\.2. “Contributor Version” + + means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor’s Contribution. + +1\.3. “Contribution” + + means Covered Software of a particular Contributor. + +1\.4. “Covered Software” + + means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. + +1\.5. “Incompatible With Secondary Licenses” + + means + +` `that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or + +` `that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. + +1\.6. “Executable Form” + + means any form of the work other than Source Code Form. + +1\.7. “Larger Work” + + means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. + +1\.8. “License” + + means this document. + +1\.9. “Licensable” + + means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. + +1\.10. “Modifications” + + means any of the following: + +` `any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or + +` `any new file in Source Code Form that contains any Covered Software. + +1\.11. “Patent Claims” of a Contributor + + means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. + +1\.12. “Secondary License” + + means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. + +1\.13. “Source Code Form” + + means the form of the work preferred for making modifications. + +1\.14. “You” (or “Your”) + + means an individual or a legal entity exercising rights under this License. For legal entities, “You” includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, “control” means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. + +2\. License Grants and Conditions + +2\.1. 
Grants + +Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: + + under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and + + under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. + +2\.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. + +2\.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: + + for any code that a Contributor has removed from Covered Software; or + + for infringements caused by: (i) Your and any other third party’s modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or + + under Patent Claims infringed by Covered Software in the absence of its Contributions. + +This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). + +2\.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). + +2\.5. Representation + +Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. + +2\.6. Fair Use + +This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. + +2\.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. + +3\. Responsibilities + +3\.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients’ rights in the Source Code Form. + +3\.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + + such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and + + You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients’ rights in the Source Code Form under this License. + +3\.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s). + +3\.4. Notices + +You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. + +3\.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, You may do so only on Your own behalf, and not on behalf of any Contributor. You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction. + +4\. Inability to Comply Due to Statute or Regulation + +If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. + +5\. Termination + +5\.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. 
However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. + +5\.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. + +5\.3. In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. + +6\. Disclaimer of Warranty + +Covered Software is provided under this License on an “as is” basis, without warranty of any kind, either expressed, implied, or statutory, including, without limitation, warranties that the Covered Software is free of defects, merchantable, fit for a particular purpose or non-infringing. The entire risk as to the quality and performance of the Covered Software is with You. Should any Covered Software prove defective in any respect, You (not any Contributor) assume the cost of any necessary servicing, repair, or correction. This disclaimer of warranty constitutes an essential part of this License. No use of any Covered Software is authorized under this License except under this disclaimer. + +7\. Limitation of Liability + +Under no circumstances and under no legal theory, whether tort (including negligence), contract, or otherwise, shall any Contributor, or anyone who distributes Covered Software as permitted above, be liable to You for any direct, indirect, special, incidental, or consequential damages of any character including, without limitation, damages for lost profits, loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses, even if such party shall have been informed of the possibility of such damages. This limitation of liability shall not apply to liability for death or personal injury resulting from such party’s negligence to the extent applicable law prohibits such limitation. Some jurisdictions do not allow the exclusion or limitation of incidental or consequential damages, so this exclusion and limitation may not apply to You. + +8\. Litigation + +Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. Nothing in this Section shall prevent a party’s ability to bring cross-claims or counter-claims. + +9\. 
Miscellaneous + +This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. + +10\. Versions of the License + +10\.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. + +10\.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. + +10\.3. Modified Versions + +If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). + +10\.4. Distributing Source Code Form that is Incompatible With Secondary Licenses + +If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at https://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - “Incompatible With Secondary Licenses” Notice + + This Source Code Form is “Incompatible With Secondary Licenses”, as defined by the Mozilla Public License, v. 2.0. + diff --git a/README.md b/README.md index a77621972e..49f4a1c474 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,10 @@ +

Intel® RealSense™

- ROS2 packages for using Intel RealSense D400 cameras.
+ ROS Wrapper for Intel(R) RealSense(TM) Cameras
Latest release notes

@@ -11,8 +12,11 @@ [![rolling][rolling-badge]][rolling] +[![jazzy][jazzy-badge]][jazzy] [![iron][iron-badge]][iron] [![humble][humble-badge]][humble] +[![foxy][foxy-badge]][foxy] +[![ubuntu24][ubuntu24-badge]][ubuntu24] [![ubuntu22][ubuntu22-badge]][ubuntu22] [![ubuntu20][ubuntu20-badge]][ubuntu20] @@ -24,7 +28,8 @@ ## Table of contents * [ROS1 and ROS2 legacy](#ros1-and-ros2-legacy) - * [Installation](#installation) + * [Installation on Ubuntu](#installation-on-ubuntu) + * [Installation on Windows](#installation-on-windows) * [Usage](#usage) * [Starting the camera node](#start-the-camera-node) * [Camera name and namespace](#camera-name-and-camera-namespace) @@ -37,6 +42,7 @@ * [Metadata Topic](#metadata-topic) * [Post-Processing Filters](#post-processing-filters) * [Available Services](#available-services) + * [Available Actions](#available-actions) * [Efficient intra-process communication](#efficient-intra-process-communication) * [Contributing](CONTRIBUTING.md) * [License](LICENSE) @@ -47,15 +53,15 @@
- Intel RealSense ROS1 Wrapper + ROS1 Wrapper for Intel® RealSense™ cameras - Intel Realsense ROS1 Wrapper is not supported anymore, since our developers team are focusing on ROS2 distro.
+ ROS1 Wrapper for Intel® RealSense™ cameras is no longer supported, since our development team is focusing on ROS2 distros.
For ROS1 wrapper, go to ros1-legacy branch
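For reference, the legacy branch can be checked out from the same repository (a sketch, reusing the clone command shown later for the ROS2 branch):

```bash
git clone https://github.com/IntelRealSense/realsense-ros.git -b ros1-legacy
```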
- Moving from ros2-legacy to ros2-development + Moving from ros2-legacy to ros2-master * Changed Parameters: @@ -63,7 +69,7 @@ - For video streams: **\.profile** replaces **\_width**, **\_height**, **\_fps** - **ROS2-legacy (Old)**: - ros2 launch realsense2_camera rs_launch.py depth_width:=640 depth_height:=480 depth_fps:=30.0 infra1_width:=640 infra1_height:=480 infra1_fps:=30.0 - - **ROS2-development (New)**: + - **ROS2-master (New)**: - ros2 launch realsense2_camera rs_launch.py depth_module.profile:=640x480x30 - Removed paramets **\_frame_id**, **\_optical_frame_id**. frame_ids are now defined by camera_name - **"filters"** is removed. All filters (or post-processing blocks) are enabled/disabled using **"\.enable"** @@ -78,46 +84,52 @@
-# Installation +# Installation on Ubuntu
Step 1: Install the ROS2 distribution - + +- #### Ubuntu 24.04: + - [ROS2 Jazzy](https://docs.ros.org/en/jazzy/Installation/Ubuntu-Install-Debians.html) + - #### Ubuntu 22.04: - [ROS2 Iron](https://docs.ros.org/en/iron/Installation/Ubuntu-Install-Debians.html) - [ROS2 Humble](https://docs.ros.org/en/humble/Installation/Ubuntu-Install-Debians.html) - + #### Ubuntu 20.04 + - [ROS2 Foxy](https://docs.ros.org/en/foxy/Installation/Ubuntu-Install-Debians.html)
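As a quick sanity check after Step 1, you can source the freshly installed distro and verify it is picked up (a minimal sketch, assuming Humble was installed; substitute the distro you chose):

```bash
# Source the ROS2 environment (adjust "humble" to the distro you installed)
source /opt/ros/humble/setup.bash
# Should print the distro name, e.g. "humble"
printenv ROS_DISTRO
```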
Step 2: Install latest Intel® RealSense™ SDK 2.0 - + + **Please choose only one option from the 3 options below (in order to prevent multiple versions installation and workspace conflicts)** + - #### Option 1: Install librealsense2 debian package from Intel servers - Jetson users - use the [Jetson Installation Guide](https://github.com/IntelRealSense/librealsense/blob/master/doc/installation_jetson.md) - Otherwise, install from [Linux Debian Installation Guide](https://github.com/IntelRealSense/librealsense/blob/master/doc/distribution_linux.md#installing-the-packages) - In this case treat yourself as a developer: make sure to follow the instructions to also install librealsense2-dev and librealsense2-dkms packages -- #### Option 2: Install librealsense2 (without graphical tools and examples) debian package from ROS servers: +- #### Option 2: Install librealsense2 (without graphical tools and examples) debian package from ROS servers (Foxy EOL distro is not supported by this option): - [Configure](http://wiki.ros.org/Installation/Ubuntu/Sources) your Ubuntu repositories - Install all realsense ROS packages by ```sudo apt install ros--librealsense2*``` - For example, for Humble distro: ```sudo apt install ros-humble-librealsense2*``` - #### Option 3: Build from source - - Download the latest [Intel® RealSense™ SDK 2.0](https://github.com/IntelRealSense/librealsense/releases/tag/v2.53.1) + - Download the latest [Intel® RealSense™ SDK 2.0](https://github.com/IntelRealSense/librealsense/releases/tag/v2.55.1) - Follow the instructions under [Linux Installation](https://github.com/IntelRealSense/librealsense/blob/master/doc/installation.md)
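Whichever option you pick, a quick way to confirm the SDK installation is to list connected devices with the SDK's enumeration tool (a sketch; it assumes the librealsense tools are present, e.g. via the librealsense2-utils package or a source build with tools enabled):

```bash
# Lists every connected RealSense device with firmware and sensor details;
# running without errors confirms the SDK libraries are installed correctly.
rs-enumerate-devices
```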
- Step 3: Install Intel® RealSense™ ROS2 wrapper + Step 3: Install ROS Wrapper for Intel® RealSense™ cameras -#### Option 1: Install debian package from ROS servers +#### Option 1: Install debian package from ROS servers (Foxy EOL distro is not supported by this option): - [Configure](http://wiki.ros.org/Installation/Ubuntu/Sources) your Ubuntu repositories - Install all realsense ROS packages by ```sudo apt install ros--realsense2-*``` - For example, for Humble distro: ```sudo apt install ros-humble-realsense2-*``` @@ -130,9 +142,9 @@ cd ~/ros2_ws/src/ ``` - - Clone the latest ROS2 Intel® RealSense™ wrapper from [here](https://github.com/IntelRealSense/realsense-ros.git) into '~/ros2_ws/src/' + - Clone the latest ROS Wrapper for Intel® RealSense™ cameras from [here](https://github.com/IntelRealSense/realsense-ros.git) into '~/ros2_ws/src/' ```bashrc - git clone https://github.com/IntelRealSense/realsense-ros.git -b ros2-development + git clone https://github.com/IntelRealSense/realsense-ros.git -b ros2-master cd ~/ros2_ws ``` @@ -151,7 +163,7 @@ - Source environment ```bash - ROS_DISTRO= # set your ROS_DISTRO: iron, humble + ROS_DISTRO= # set your ROS_DISTRO: jazzy, iron, humble, foxy source /opt/ros/$ROS_DISTRO/setup.bash cd ~/ros2_ws . install/local_setup.bash @@ -161,6 +173,80 @@
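After building and sourcing the workspace, an optional way to verify that the wrapper packages are visible to ROS2 (not part of the official steps, just a sanity check):

```bash
# Should list realsense2_camera, realsense2_camera_msgs and realsense2_description
ros2 pkg list | grep realsense
```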
+# Installation on Windows + **PLEASE PAY ATTENTION: ROS Wrapper for Intel® RealSense™ cameras is not intended to be supported on Windows by our team, since ROS2 and its packages are still not fully supported on Windows. We added the installation steps below to make it easier for users who have already started working with ROS2 on Windows and want to take advantage of the capabilities of our RealSense cameras.** + +
+ + Step 1: Install the ROS2 distribution + + +- #### Windows 10/11 + + **Please choose only one of the two options below (in order to prevent installing multiple versions and causing workspace conflicts)** + + - Manual install from the official ROS2 documentation: + - [ROS2 Jazzy](https://docs.ros.org/en/jazzy/Installation/Windows-Install-Binary.html) + - [ROS2 Iron](https://docs.ros.org/en/iron/Installation/Windows-Install-Binary.html) + - [ROS2 Humble](https://docs.ros.org/en/humble/Installation/Windows-Install-Binary.html) + - [ROS2 Foxy](https://docs.ros.org/en/foxy/Installation/Windows-Install-Binary.html) + - Microsoft IOT binary installation: + - https://ms-iot.github.io/ROSOnWindows/GettingStarted/SetupRos2.html + - Note that the installation examples there are for the Foxy distro (which is no longer supported by ROS Wrapper for Intel® RealSense™ cameras) + - Please replace the word "Foxy" with Humble, Iron or Jazzy, depending on the chosen distro. +
+ +
+ + Step 2: Download RealSense™ ROS2 Wrapper and RealSense™ SDK 2.0 source code from GitHub: + + +- Download the ROS Wrapper for Intel® RealSense™ cameras source code from [ROS Wrapper for Intel® RealSense™ cameras releases](https://github.com/IntelRealSense/realsense-ros/releases) +- Download the corresponding supported Intel® RealSense™ SDK 2.0 source code from the **"Supported RealSense SDK" section** of the specific release you chose from the link above +- Place the librealsense folder inside the realsense-ros folder, so that the librealsense package sits beside the realsense2_camera, realsense2_camera_msgs and realsense2_description packages +
+ +
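A sketch of the workspace layout this step aims for (the C:\ros2_ws path is only an example; the point is that librealsense sits next to the wrapper packages):

```bash
# Illustrative layout after placing the librealsense folder inside realsense-ros:
# C:\ros2_ws\realsense-ros\
#   librealsense\
#   realsense2_camera\
#   realsense2_camera_msgs\
#   realsense2_description\
```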
+ + Step 3: Build + + +1. Before building our packages, make sure you have OpenCV for Windows installed on your machine. If you chose the Microsoft IOT installation option, it will be installed automatically. Later, when running colcon build, you might need to expose this installation folder by setting the CMAKE_PREFIX_PATH, PATH, or OpenCV_DIR environment variables +2. Run "x64 Native Tools Command Prompt for VS 2019" as administrator +3. Set up the ROS2 Environment (Do this for every new terminal/cmd you open): + - If you chose the Microsoft IOT Binary option for installation + ``` + > C:\opt\ros\humble\x64\setup.bat + ``` + + - If you chose the manual install from the official ROS2 documentation: + ``` + > call C:\dev\ros2_iron\local_setup.bat + ``` +4. Change directory to the realsense-ros folder + ```bash + > cd C:\ros2_ws\realsense-ros + ``` +5. Build the librealsense2 package only + ```bash + > colcon build --packages-select librealsense2 --cmake-args -DBUILD_EXAMPLES=OFF -DBUILD_WITH_STATIC_CRT=OFF -DBUILD_GRAPHICAL_EXAMPLES=OFF + ``` + - You can add the `--event-handlers console_direct+` parameter to see more debug output from the colcon build +6. Build the other packages + ```bash + > colcon build --packages-select realsense2_camera_msgs realsense2_description realsense2_camera + ``` + - You can add the `--event-handlers console_direct+` parameter to see more debug output from the colcon build + +7. Set up the environment with the newly installed packages (Do this for every new terminal/cmd you open): + ```bash + > call install\setup.bat + ``` +
+ +
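After step 7, an optional check that the freshly built wrapper overlays the environment correctly (run it in the same terminal where install\setup.bat was called):

```bash
# Prints the install prefix of the wrapper package if the overlay is active
ros2 pkg prefix realsense2_camera
```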
+ + # Usage ## Start the camera node @@ -172,7 +258,7 @@ #### with ros2 launch: ros2 launch realsense2_camera rs_launch.py - ros2 launch realsense2_camera rs_launch.py depth_module.profile:=1280x720x30 pointcloud.enable:=true + ros2 launch realsense2_camera rs_launch.py depth_module.depth_profile:=1280x720x30 pointcloud.enable:=true
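Once the node is up, the published topics can be inspected from a second terminal (a minimal sketch; the topic names assume the default camera name and namespace described below):

```bash
# List the topics published by the camera node
ros2 topic list | grep /camera
# Measure the publishing rate of the color stream
ros2 topic hz /camera/camera/color/image_raw
```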
@@ -212,6 +298,7 @@ User can set the camera name and camera namespace, to distinguish between camera /robot1/D455_1/imu > ros2 service list + /robot1/D455_1/hw_reset /robot1/D455_1/device_info ``` @@ -234,6 +321,7 @@ User can set the camera name and camera namespace, to distinguish between camera /camera/camera/imu > ros2 service list +/camera/camera/hw_reset /camera/camera/device_info ``` @@ -251,10 +339,10 @@ User can set the camera name and camera namespace, to distinguish between camera #### Parameters that can be modified during runtime: - All of the filters and sensors inner parameters. - Video Sensor Parameters: (```depth_module``` and ```rgb_camera```) - - They have, at least, the **profile** parameter. + - They have, at least, the **_profile** parameter. - The profile parameter is a string of the following format: \X\X\ (The dividing character can be X, x or ",". Spaces are ignored.) - - For example: ```depth_module.profile:=640x480x30 rgb_camera.profile:=1280x720x30``` - - Since infra, infra1, infra2 and depth are all streams of the depth_module, their width, height and fps are defined by the same param **depth_module.profile** + - For example: ```depth_module.depth_profile:=640x480x30 depth_module.infra_profile:=640x480x30 rgb_camera.color_profile:=1280x720x30``` + - Note: The param **depth_module.infra_profile** is common for all infra streams. i.e., infra 0, 1 & 2. - If the specified combination of parameters is not available by the device, the default or previously set configuration will be used. - Run ```ros2 param describe ``` to get the list of supported profiles. - Note: Should re-enable the stream for the change to take effect. @@ -281,6 +369,7 @@ User can set the camera name and camera namespace, to distinguish between camera - can be any of *infra, infra1, infra2, color, depth, gyro, accel*. - Available values are the following strings: `SYSTEM_DEFAULT`, `DEFAULT`, `PARAMETER_EVENTS`, `SERVICES_DEFAULT`, `PARAMETERS`, `SENSOR_DATA`. - For example: ```depth_qos:=SENSOR_DATA``` + - Pointcloud QoS is controlled with the `pointcloud.pointcloud_qos` parameter in the pointcloud filter, refer to the Post-Processing Filters section for details. - Reference: [ROS2 QoS profiles formal documentation](https://docs.ros.org/en/rolling/Concepts/About-Quality-of-Service-Settings.html#qos-profiles) - **Notice:** ****_info_qos** refers to both camera_info topics and metadata topics. - **tf_publish_rate**: @@ -305,6 +394,17 @@ User can set the camera name and camera namespace, to distinguish between camera - 1 -> **copy**: Every gyro message will be attached by the last accel message. - 2 -> **linear_interpolation**: Every gyro message will be attached by an accel message which is interpolated to gyro's timestamp. - Note: When the param *unite_imu_method* is dynamically updated, re-enable either gyro or accel stream for the change to take effect. +- **accelerate_gpu_with_glsl**: + - Boolean: GPU accelerated with GLSL for processing PointCloud and Colorizer filters. 
+ - Note: + - To have smooth transition between the processing blocks when this parameter is updated dynamically, the node will: + - Stop the video sensors + - Do necessary GLSL configuration + - And then, start the video sensors + - To enable GPU acceleration, turn ON `BUILD_ACCELERATE_GPU_WITH_GLSL` during build: + ```bash + colcon build --cmake-args '-DBUILD_ACCELERATE_GPU_WITH_GLSL=ON' + ``` #### Parameters that cannot be changed in runtime: - **serial_no**: @@ -501,18 +601,32 @@ The following post processing filters are available: * The texture of the pointcloud can be modified using the `pointcloud.stream_filter` parameter.
* The depth FOV and the texture FOV are not similar. By default, pointcloud is limited to the section of depth containing the texture. You can have a full depth to pointcloud, coloring the regions beyond the texture with zeros, by setting `pointcloud.allow_no_texture_points` to true. * pointcloud is of an unordered format by default. This can be changed by setting `pointcloud.ordered_pc` to true. + * The QoS of the pointcloud topic is independent from depth and color streams and can be controlled with the `pointcloud.pointcloud_qos` parameter. + - The same set of QoS values are supported as other streams, refer to _qos in the Parameters section of this page. + - The launch file should include the parameter with initial QoS value, for example,`{'name': 'pointcloud.pointcloud_qos', 'default': 'SENSOR_DATA', 'description': 'pointcloud qos'}` + - The QoS value can also be overridden at launch with command option, for example, `pointcloud.pointcloud_qos:=SENSOR_DATA` + - At runtime, the QoS can be changed dynamically but require the filter re-enable for the change to take effect, for example, + ```bash + ros2 param set /camera/camera pointcloud.pointcloud_qos SENSOR_DATA + ros2 param set /camera/camera pointcloud.enable false + ros2 param set /camera/camera pointcloud.enable true + ``` - ```hdr_merge```: Allows depth image to be created by merging the information from 2 consecutive frames, taken with different exposure and gain values. - - `depth_module.hdr_enabled`: to enable/disable HDR - - The way to set exposure and gain values for each sequence in runtime is by first selecting the sequence id, using the `depth_module.sequence_id` parameter and then modifying the `depth_module.gain`, and `depth_module.exposure`. - - From FW versions 5.14.x.x and above, if HDR is enabled, the preset configs (like exposure, gain, etc.,) cannot be updated. - - Disable the HDR first using `depth_module.hdr_enabled` parameter and then, update the required presets. - - To view the effect on the infrared image for each sequence id use the `filter_by_sequence_id.sequence_id` parameter. - - To initialize these parameters in start time use the following parameters: - - `depth_module.exposure.1` - - `depth_module.gain.1` - - `depth_module.exposure.2` - - `depth_module.gain.2` - - For in-depth review of the subject please read the accompanying [white paper](https://dev.intelrealsense.com/docs/high-dynamic-range-with-stereoscopic-depth-cameras). + - `depth_module.hdr_enabled`: to enable/disable HDR. The way to set exposure and gain values for each sequence: + - during Runtime: + - is by first selecting the sequence id, using the `depth_module.sequence_id` parameter and then modifying the `depth_module.gain`, and `depth_module.exposure`. + - From FW versions 5.14.x.x and above, if HDR is enabled, the preset configs (like exposure, gain, etc.,) cannot be updated. + - Disable the HDR first using `depth_module.hdr_enabled` parameter and then, update the required presets. + - during Launch time of the node: + - is by setting below parameters + - `depth_module.exposure.1` + - `depth_module.gain.1` + - `depth_module.exposure.2` + - `depth_module.gain.2` + - Make sure to set `depth_module.hdr_enabled` to true, otherwise these parameters won't be considered. + - To view the effect on the infrared image for each sequence id use the `filter_by_sequence_id.sequence_id` parameter. 
+ - For an in-depth review of the subject, please read the accompanying [white paper](https://dev.intelrealsense.com/docs/high-dynamic-range-with-stereoscopic-depth-cameras).
+ - **Note**: Auto exposure functionality is not supported when HDR is enabled, i.e., auto exposure will be automatically disabled if HDR is enabled.
- The following filters have detailed descriptions in : https://github.com/IntelRealSense/librealsense/blob/master/doc/post-processing-filters.md
- ```disparity_filter``` - convert depth to disparity before applying other filters and back.
@@ -526,8 +640,82 @@ Each of the above filters have it's own parameters, following the naming convent
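To make the two HDR configuration paths above concrete, here is a minimal sketch (editorial illustration, not part of the change itself). It assumes the node runs under the default `/camera/camera` name used in the other examples, and the exposure/gain numbers are placeholders:

```bash
# Launch time: enable HDR and preset both sequences (illustrative values)
ros2 launch realsense2_camera rs_launch.py \
    depth_module.hdr_enabled:=true \
    depth_module.exposure.1:=7500 depth_module.gain.1:=16 \
    depth_module.exposure.2:=1 depth_module.gain.2:=16

# Runtime: select a sequence id, then update its exposure/gain.
# With FW 5.14.x.x and above, disable HDR before updating the presets, then re-enable it.
ros2 param set /camera/camera depth_module.hdr_enabled false
ros2 param set /camera/camera depth_module.sequence_id 1
ros2 param set /camera/camera depth_module.exposure 7500
ros2 param set /camera/camera depth_module.gain 16
ros2 param set /camera/camera depth_module.hdr_enabled true

# Inspect the effect of a specific sequence id on the infrared image
ros2 param set /camera/camera filter_by_sequence_id.sequence_id 1
```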
## Available services
-
-- device_info : retrieve information about the device - serial_number, firmware_version etc. Type `ros2 interface show realsense2_camera_msgs/srv/DeviceInfo` for the full list. Call example: `ros2 service call /camera/camera/device_info realsense2_camera_msgs/srv/DeviceInfo`
+
+### hw_reset:
+ - reset the device. The call stops all the streams too.
+ - Call example: `ros2 service call /camera/camera/hw_reset std_srvs/srv/Empty`
+
+### device_info:
+ - retrieve information about the device - serial_number, firmware_version etc.
+ - Type `ros2 interface show realsense2_camera_msgs/srv/DeviceInfo` for the full list.
+ - Call example: `ros2 service call /camera/camera/device_info realsense2_camera_msgs/srv/DeviceInfo`
+
+### calib_config_read (for specific camera modules):
+ - Read calibration config.
+ - Note that reading calibration config is applicable only in Safety Service Mode
+ - Type `ros2 interface show realsense2_camera_msgs/srv/CalibConfigRead` for the full request/response fields.
+ - Call example: `ros2 service call /camera/camera/calib_config_read realsense2_camera_msgs/srv/CalibConfigRead`
+
+ Click to see the full response of the call example + + `response: realsense2_camera_msgs.srv.CalibConfigRead_Response(success=True, error_message='', calib_config='{"calibration_config":{"camera_position":{"rotation":[[0.0,0.0,1.0],[-1.0,0.0,0.0],[0.0,-1.0,0.0]],"translation":[0.0,0.0,0.0]},"crypto_signature":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"roi_0":{"vertex_0":[0,0],"vertex_1":[0,0],"vertex_2":[0,0],"vertex_3":[0,0]},"roi_1":{"vertex_0":[0,0],"vertex_1":[0,0],"vertex_2":[0,0],"vertex_3":[0,0]},"roi_2":{"vertex_0":[0,0],"vertex_1":[0,0],"vertex_2":[0,0],"vertex_3":[0,0]},"roi_3":{"vertex_0":[0,0],"vertex_1":[0,0],"vertex_2":[0,0],"vertex_3":[0,0]},"roi_num_of_segments":0}}')` + +
+
+### calib_config_write (for specific camera modules):
+ - Write calibration config.
+ - Note that writing calibration config is applicable only in Safety Service Mode
+ - Type `ros2 interface show realsense2_camera_msgs/srv/CalibConfigWrite` for the full request/response fields.
+ - For command line usage only, the user should escape all " with \\". Using the ros2 services API from rclcpp/rclpy doesn't need escaping. e.g.:
+
+ Click to see full call example + + `ros2 service call /camera/camera/calib_config_write realsense2_camera_msgs/srv/CalibConfigWrite "{calib_config: '{\"calibration_config\":{\"camera_position\":{\"rotation\":[[0.0,0.0,1.0],[-1.0,0.0,0.0],[0.0,-1.0,0.0]],\"translation\":[0.0,0.0,0.0]},\"crypto_signature\":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],\"roi_0\":{\"vertex_0\":[0,0],\"vertex_1\":[0,0],\"vertex_2\":[0,0],\"vertex_3\":[0,0]},\"roi_1\":{\"vertex_0\":[0,0],\"vertex_1\":[0,0],\"vertex_2\":[0,0],\"vertex_3\":[0,0]},\"roi_2\":{\"vertex_0\":[0,0],\"vertex_1\":[0,0],\"vertex_2\":[0,0],\"vertex_3\":[0,0]},\"roi_3\":{\"vertex_0\":[0,0],\"vertex_1\":[0,0],\"vertex_2\":[0,0],\"vertex_3\":[0,0]},\"roi_num_of_segments\":0}}' }"` + +
+ + - [JSON calib config example](realsense2_camera/examples/d500_tables/calib_config_example.json) + - Result example: `realsense2_camera_msgs.srv.CalibConfigWrite_Response(success=True, error_message='')` + +
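A possible way to feed the linked example JSON to `calib_config_write` from the command line is sketched below. The `sed`/`tr` pre-processing is an untested editorial convenience (an assumption, not something shipped with this package); it simply applies the quote escaping described above:

```bash
# Escape every " as \" and strip newlines, as required for command line usage,
# then pass the result as the calib_config string (adjust the file path as needed).
CALIB_JSON=$(sed 's/"/\\"/g' realsense2_camera/examples/d500_tables/calib_config_example.json | tr -d '\n')
ros2 service call /camera/camera/calib_config_write \
    realsense2_camera_msgs/srv/CalibConfigWrite "{calib_config: '$CALIB_JSON'}"
```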
+
+## Available actions
+
+### triggered_calibration (supported only for D500 devices)
+ - Type `ros2 interface show realsense2_camera_msgs/action/TriggeredCalibration` for the full request/result/feedback fields.
+ ```
+ # request
+ string json "calib run" # default value
+ ---
+ # result
+ bool success
+ string error_msg
+ string calibration
+ float32 health
+ ---
+ # feedback
+ float32 progress
+
+ ```
+ - Before calling triggered calibration, the user should set the following parameters (see the example after this section):
+ - `depth_module.visual_preset: 1` # switch to visual preset #1 in depth module
+ - `depth_module.emitter_enabled: true` # enable emitter in depth module
+ - `depth_module.enable_auto_exposure: true` # enable AE in depth module
+ - `enable_depth: false` # turn off depth stream
+ - `enable_infra1: false` # turn off infra1 stream
+ - `enable_infra2: false` # turn off infra2 stream
+ - To use from command line: `ros2 action send_goal /camera/camera/triggered_calibration realsense2_camera_msgs/action/TriggeredCalibration '{json: "{calib run}"}'` or even with an empty request `ros2 action send_goal /camera/camera/triggered_calibration realsense2_camera_msgs/action/TriggeredCalibration ''` because the default behavior is already calib run.
+ - The action gives updated feedback about the progress (%) if the client asks for feedback. To do that, add `--feedback` to the end of the command.
+ - If succeeded, the action writes the new calibration table to the flash. It also returns the new calibration table as a JSON string and the health as a float32.
+ - If failed, it will return the error message inside the result. For example:
+ ```
+ Result:
+ success: false
+ error_msg: 'TriggeredCalibrationExecute: Aborted. Error: Calibration completed but algorithm failed'
+ calibration: '{}'
+ health: 0.0
+ ```
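As a convenience, here is a hedged end-to-end sketch of the flow above on a running node. The `/camera/camera` prefix follows the other examples and the values come from the prerequisite list; this is an editorial illustration, not a command sequence mandated by the package:

```bash
# Set the prerequisites listed above
ros2 param set /camera/camera depth_module.visual_preset 1
ros2 param set /camera/camera depth_module.emitter_enabled true   # use 1 if the option is exposed as an integer
ros2 param set /camera/camera depth_module.enable_auto_exposure true
ros2 param set /camera/camera enable_depth false
ros2 param set /camera/camera enable_infra1 false
ros2 param set /camera/camera enable_infra2 false

# Send the calibration goal and ask for progress feedback
ros2 action send_goal /camera/camera/triggered_calibration \
    realsense2_camera_msgs/action/TriggeredCalibration '{json: "{calib run}"}' --feedback
```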
@@ -578,10 +766,16 @@ ros2 launch realsense2_camera rs_intra_process_demo_launch.py intra_process_comm [rolling-badge]: https://img.shields.io/badge/-ROLLING-orange?style=flat-square&logo=ros [rolling]: https://docs.ros.org/en/rolling/index.html +[jazzy-badge]: https://img.shields.io/badge/-JAZZY-orange?style=flat-square&logo=ros +[jazzy]: https://docs.ros.org/en/jazzy/index.html +[foxy-badge]: https://img.shields.io/badge/-FOXY-orange?style=flat-square&logo=ros +[foxy]: https://docs.ros.org/en/foxy/index.html [humble-badge]: https://img.shields.io/badge/-HUMBLE-orange?style=flat-square&logo=ros [humble]: https://docs.ros.org/en/humble/index.html [iron-badge]: https://img.shields.io/badge/-IRON-orange?style=flat-square&logo=ros [iron]: https://docs.ros.org/en/iron/index.html +[ubuntu24-badge]: https://img.shields.io/badge/-UBUNTU%2024%2E04-blue?style=flat-square&logo=ubuntu&logoColor=white +[ubuntu24]: https://releases.ubuntu.com/noble/ [ubuntu22-badge]: https://img.shields.io/badge/-UBUNTU%2022%2E04-blue?style=flat-square&logo=ubuntu&logoColor=white [ubuntu22]: https://releases.ubuntu.com/jammy/ [ubuntu20-badge]: https://img.shields.io/badge/-UBUNTU%2020%2E04-blue?style=flat-square&logo=ubuntu&logoColor=white diff --git a/realsense2_camera/CHANGELOG.rst b/realsense2_camera/CHANGELOG.rst index a1923a8e60..9098646e47 100644 --- a/realsense2_camera/CHANGELOG.rst +++ b/realsense2_camera/CHANGELOG.rst @@ -2,6 +2,56 @@ Changelog for package realsense2_camera ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +4.55.1 (2024-05-28) +------------------- +* PR `#3106 `_ from SamerKhshiboun: Remove unused parameter _is_profile_exist +* PR `#3098 `_ from kadiredd: ROS live cam test fixes +* PR `#3094 `_ from kadiredd: ROSCI infra for live camera testing +* PR `#3066 `_ from SamerKhshiboun: Revert Foxy Build Support (From Source) +* PR `#3052 `_ from Arun-Prasad-V: Support for selecting profile for each stream_type +* PR `#3056 `_ from SamerKhshiboun: Add documentation for RealSense ROS2 Wrapper Windows installation +* PR `#3049 `_ from Arun-Prasad-V: Applying Colorizer filter to Aligned-Depth image +* PR `#3053 `_ from Nir-Az: Fix Coverity issues + remove empty warning log +* PR `#3007 `_ from Arun-Prasad-V: Skip updating Exp 1,2 & Gain 1,2 when HDR is disabled +* PR `#3042 `_ from kadiredd: Assert Fail if camera not found +* PR `#3008 `_ from Arun-Prasad-V: Renamed GL GPU enable param +* PR `#2989 `_ from Arun-Prasad-V: Dynamically switching b/w CPU & GPU processing +* PR `#3001 `_ from deep0294: Update ReadMe to run ROS2 Unit Test +* PR `#2998 `_ from SamerKhshiboun: fix calibration intrinsic fail +* PR `#2987 `_ from SamerKhshiboun: Remove D465 SKU +* PR `#2984 `_ from deep0294: Fix All Profiles Test +* PR `#2956 `_ from Arun-Prasad-V: Extending LibRS's GL support to RS ROS2 +* PR `#2953 `_ from Arun-Prasad-V: Added urdf & mesh files for D405 model +* PR `#2940 `_ from Arun-Prasad-V: Fixing the data_type of ROS Params exposure & gain +* PR `#2948 `_ from Arun-Prasad-V: Disabling HDR during INIT +* PR `#2934 `_ from Arun-Prasad-V: Disabling hdr while updating exposure & gain values +* PR `#2946 `_ from gwen2018: fix ros random crash with error hw monitor command for asic temperature failed +* PR `#2865 `_ from PrasRsRos: add live camera tests +* PR `#2891 `_ from Arun-Prasad-V: revert PR2872 +* PR `#2853 `_ from Arun-Prasad-V: Frame latency for the '/topic' provided by user +* PR `#2872 `_ from Arun-Prasad-V: Updating _camera_name with RS node's name +* PR `#2878 `_ from Arun-Prasad-V: Updated ros2 examples 
and readme +* PR `#2841 `_ from SamerKhshiboun: Remove Dashing, Eloquent, Foxy, L500 and SR300 support +* PR `#2868 `_ from Arun-Prasad-V: Fix Pointcloud topic frame_id +* PR `#2849 `_ from Arun-Prasad-V: Create /imu topic only when motion streams enabled +* PR `#2847 `_ from Arun-Prasad-V: Updated rs_launch param names +* PR `#2839 `_ from Arun-Prasad: Added ros2 examples +* PR `#2861 `_ from SamerKhshiboun: fix readme and nodefactory for ros2 run +* PR `#2859 `_ from PrasRsRos: Fix tests (topic now has camera name) +* PR `#2857 `_ from lge-ros2: Apply camera name in topics +* PR `#2840 `_ from SamerKhshiboun: Support Depth, IR and Color formats in ROS2 +* PR `#2764 `_ from lge-ros2 : support modifiable camera namespace +* PR `#2830 `_ from SamerKhshiboun: Add RGBD + reduce changes between hkr and development +* PR `#2811 `_ from Arun-Prasad-V: Exposing stream formats params to user +* PR `#2825 `_ from SamerKhshiboun: Fix align_depth + add test +* PR `#2822 `_ from Arun-Prasad-V: Updated rs_launch configurations +* PR `#2726 `_ from PrasRsRos: Integration test template +* PR `#2742 `_ from danielhonies:Update rs_launch.py +* PR `#2806 `_ from Arun-Prasad-V: Enabling RGB8 Infrared stream +* PR `#2799 `_ from SamerKhshiboun: Fix overriding frames on same topics/CV-images due to a bug in PR2759 +* PR `#2759 `_ from SamerKhshiboun: Cleanups and name fixes +* Contributors: (=YG=) Hyunseok Yang, Arun Prasad, Arun-Prasad-V, Daniel Honies, Hyunseok, Madhukar Reddy Kadireddy, Nir, Nir Azkiel, PrasRsRos, Samer Khshiboun, SamerKhshiboun, deep0294, gwen2018, nairps + 4.54.1 (2023-06-27) ------------------- * Applying AlignDepth filter after Pointcloud diff --git a/realsense2_camera/CMakeLists.txt b/realsense2_camera/CMakeLists.txt index 67361a948c..0e78eb1957 100644 --- a/realsense2_camera/CMakeLists.txt +++ b/realsense2_camera/CMakeLists.txt @@ -1,4 +1,4 @@ -# Copyright 2023 Intel Corporation. All Rights Reserved. +# Copyright 2024 Intel Corporation. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -107,16 +107,23 @@ find_package(builtin_interfaces REQUIRED) find_package(cv_bridge REQUIRED) find_package(image_transport REQUIRED) find_package(rclcpp REQUIRED) +find_package(rclcpp_action REQUIRED) find_package(rclcpp_components REQUIRED) find_package(realsense2_camera_msgs REQUIRED) +find_package(std_srvs REQUIRED) find_package(std_msgs REQUIRED) find_package(sensor_msgs REQUIRED) find_package(nav_msgs REQUIRED) find_package(tf2_ros REQUIRED) find_package(tf2 REQUIRED) find_package(diagnostic_updater REQUIRED) +find_package(OpenCV REQUIRED COMPONENTS core) +find_package(fastrtps REQUIRED) # TODO: LRS-1203 - This is a workaround, till librealsense2 cmake config will be fixed -find_package(realsense2 2.54.1) +find_package(realsense2 2.56.0) +if (BUILD_ACCELERATE_GPU_WITH_GLSL) + find_package(realsense2-gl 2.56.0) +endif() if(NOT realsense2_FOUND) message(FATAL_ERROR "\n\n Intel RealSense SDK 2.0 is missing, please install it from https://github.com/IntelRealSense/librealsense/releases\n\n") endif() @@ -124,6 +131,8 @@ endif() #set(CMAKE_NO_SYSTEM_FROM_IMPORTED true) include_directories(include) +include_directories(${OpenCV_INCLUDE_DIRS}) # add OpenCV includes to the included dirs + set(node_plugins "") set(SOURCES @@ -139,12 +148,21 @@ set(SOURCES src/profile_manager.cpp src/image_publisher.cpp src/tfs.cpp + src/actions.cpp ) +if (BUILD_ACCELERATE_GPU_WITH_GLSL) + list(APPEND SOURCES src/gl_gpu_processing.cpp) +endif() + if(NOT DEFINED ENV{ROS_DISTRO}) message(FATAL_ERROR "ROS_DISTRO is not defined." ) endif() -if("$ENV{ROS_DISTRO}" STREQUAL "humble") +if("$ENV{ROS_DISTRO}" STREQUAL "foxy") + message(STATUS "Build for ROS2 Foxy") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DFOXY") + set(SOURCES "${SOURCES}" src/ros_param_backend.cpp) +elseif("$ENV{ROS_DISTRO}" STREQUAL "humble") message(STATUS "Build for ROS2 Humble") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DHUMBLE") set(SOURCES "${SOURCES}" src/ros_param_backend.cpp) @@ -156,6 +174,10 @@ elseif("$ENV{ROS_DISTRO}" STREQUAL "rolling") message(STATUS "Build for ROS2 Rolling") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DROLLING") set(SOURCES "${SOURCES}" src/ros_param_backend.cpp) +elseif("$ENV{ROS_DISTRO}" STREQUAL "jazzy") + message(STATUS "Build for ROS2 Jazzy") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DJAZZY") + set(SOURCES "${SOURCES}" src/ros_param_backend.cpp) else() message(FATAL_ERROR "Unsupported ROS Distribution: " "$ENV{ROS_DISTRO}") endif() @@ -171,6 +193,10 @@ if(${rclcpp_VERSION} VERSION_GREATER_EQUAL "17.0") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DRCLCPP_HAS_OnSetParametersCallbackType") endif() +if (BUILD_ACCELERATE_GPU_WITH_GLSL) + add_definitions(-DACCELERATE_GPU_WITH_GLSL) +endif() + set(INCLUDES include/constants.h include/realsense_node_factory.h @@ -184,6 +210,9 @@ set(INCLUDES include/profile_manager.h include/image_publisher.h) +if (BUILD_ACCELERATE_GPU_WITH_GLSL) + list(APPEND INCLUDES include/gl_window.h) +endif() if (BUILD_TOOLS) @@ -200,9 +229,17 @@ add_library(${PROJECT_NAME} SHARED ${SOURCES} ) +if (BUILD_ACCELERATE_GPU_WITH_GLSL) + set(link_libraries ${realsense2-gl_LIBRARY}) +else() + set(link_libraries ${realsense2_LIBRARY}) +endif() + +list(APPEND link_libraries ${OpenCV_LIBS}) # add OpenCV libs to link_libraries + target_link_libraries(${PROJECT_NAME} - ${realsense2_LIBRARY} - ) + ${link_libraries} +) set(dependencies cv_bridge @@ -210,15 +247,21 @@ set(dependencies rclcpp rclcpp_components realsense2_camera_msgs + std_srvs std_msgs sensor_msgs nav_msgs tf2 - realsense2 tf2_ros diagnostic_updater ) 
+if (BUILD_ACCELERATE_GPU_WITH_GLSL) + list(APPEND dependencies realsense2-gl) +else() + list(APPEND dependencies realsense2) +endif() + ament_target_dependencies(${PROJECT_NAME} ${dependencies} ) @@ -278,6 +321,7 @@ if(BUILD_TESTING) $ ) ament_target_dependencies(${_test_name} + std_srvs std_msgs ) #target_link_libraries(${_test_name} name_of_local_library) diff --git a/realsense2_camera/examples/d500_tables/calib_config_example.json b/realsense2_camera/examples/d500_tables/calib_config_example.json new file mode 100644 index 0000000000..6346ca5498 --- /dev/null +++ b/realsense2_camera/examples/d500_tables/calib_config_example.json @@ -0,0 +1,45 @@ +{ + "calibration_config": + { + "roi_num_of_segments": 2, + "roi_0": + { + "vertex_0": [ 0, 36 ], + "vertex_1": [ 640, 144 ], + "vertex_2": [ 640, 576 ], + "vertex_3": [ 0, 684 ] + }, + "roi_1": + { + "vertex_0": [ 640, 144 ], + "vertex_1": [ 1280, 35 ], + "vertex_2": [ 1280, 684 ], + "vertex_3": [ 640, 576 ] + }, + "roi_2": + { + "vertex_0": [ 0, 0 ], + "vertex_1": [ 0, 0 ], + "vertex_2": [ 0, 0 ], + "vertex_3": [ 0, 0 ] + }, + "roi_3": + { + "vertex_0": [ 0, 0 ], + "vertex_1": [ 0, 0 ], + "vertex_2": [ 0, 0 ], + "vertex_3": [ 0, 0 ] + }, + "camera_position": + { + "rotation": + [ + [ 0.0, 0.0, 1.0], + [-1.0, 0.0, 0.0], + [ 0.0, -1.0, 0.0] + ], + "translation": [0.0, 0.0, 0.27] + }, + "crypto_signature": [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + } +} diff --git a/realsense2_camera/examples/launch_params_from_file/README.md b/realsense2_camera/examples/launch_params_from_file/README.md index ce9980aa5b..f12903a25f 100644 --- a/realsense2_camera/examples/launch_params_from_file/README.md +++ b/realsense2_camera/examples/launch_params_from_file/README.md @@ -24,7 +24,7 @@ param2: value Example: ``` enable_color: true -rgb_camera.profile: 1280x720x15 +rgb_camera.color_profile: 1280x720x15 enable_depth: true align_depth.enable: true enable_sync: true diff --git a/realsense2_camera/examples/launch_params_from_file/config/config.yaml b/realsense2_camera/examples/launch_params_from_file/config/config.yaml index d15a19a0fe..9d63fec7fe 100644 --- a/realsense2_camera/examples/launch_params_from_file/config/config.yaml +++ b/realsense2_camera/examples/launch_params_from_file/config/config.yaml @@ -1,5 +1,5 @@ enable_color: true -rgb_camera.profile: 1280x720x15 +rgb_camera.color_profile: 1280x720x15 enable_depth: true align_depth.enable: true enable_sync: true diff --git a/realsense2_camera/include/base_realsense_node.h b/realsense2_camera/include/base_realsense_node.h index 9cb948d696..7d4a2c42c1 100755 --- a/realsense2_camera/include/base_realsense_node.h +++ b/realsense2_camera/include/base_realsense_node.h @@ -27,11 +27,16 @@ #include #include +#include #include "realsense2_camera_msgs/msg/imu_info.hpp" #include "realsense2_camera_msgs/msg/extrinsics.hpp" #include "realsense2_camera_msgs/msg/metadata.hpp" #include "realsense2_camera_msgs/msg/rgbd.hpp" #include "realsense2_camera_msgs/srv/device_info.hpp" +#include "realsense2_camera_msgs/srv/calib_config_read.hpp" +#include "realsense2_camera_msgs/srv/calib_config_write.hpp" +#include "rclcpp_action/rclcpp_action.hpp" +#include "realsense2_camera_msgs/action/triggered_calibration.hpp" #include #include @@ -49,6 +54,10 @@ #include #include +#if defined (ACCELERATE_GPU_WITH_GLSL) +#include +#endif + #include #include #include @@ -114,6 +123,8 @@ namespace realsense2_camera void publishTopics(); public: + using TriggeredCalibration = 
realsense2_camera_msgs::action::TriggeredCalibration; + using GoalHandleTriggeredCalibration = rclcpp_action::ServerGoalHandle; enum class imu_sync_method{NONE, COPY, LINEAR_INTERPOLATION}; protected: @@ -145,15 +156,33 @@ namespace realsense2_camera std::string _camera_name; std::vector _monitor_options; rclcpp::Logger _logger; + rclcpp::Service::SharedPtr _reset_srv; rclcpp::Service::SharedPtr _device_info_srv; + rclcpp::Service::SharedPtr _calib_config_read_srv; + rclcpp::Service::SharedPtr _calib_config_write_srv; + rclcpp_action::Server::SharedPtr _triggered_calibration_action_server; + std::shared_ptr _parameters; std::list _parameters_names; void restartStaticTransformBroadcaster(); void publishExtrinsicsTopic(const stream_index_pair& sip, const rs2_extrinsics& ex); void calcAndAppendTransformMsgs(const rs2::stream_profile& profile, const rs2::stream_profile& base_profile); + + void handleHWReset(const std_srvs::srv::Empty::Request::SharedPtr req, + const std_srvs::srv::Empty::Response::SharedPtr res); void getDeviceInfo(const realsense2_camera_msgs::srv::DeviceInfo::Request::SharedPtr req, realsense2_camera_msgs::srv::DeviceInfo::Response::SharedPtr res); + void CalibConfigReadService(const realsense2_camera_msgs::srv::CalibConfigRead::Request::SharedPtr req, + realsense2_camera_msgs::srv::CalibConfigRead::Response::SharedPtr res); + void CalibConfigWriteService(const realsense2_camera_msgs::srv::CalibConfigWrite::Request::SharedPtr req, + realsense2_camera_msgs::srv::CalibConfigWrite::Response::SharedPtr res); + + rclcpp_action::GoalResponse TriggeredCalibrationHandleGoal(const rclcpp_action::GoalUUID & uuid, std::shared_ptr goal); + rclcpp_action::CancelResponse TriggeredCalibrationHandleCancel(const std::shared_ptr goal_handle); + void TriggeredCalibrationHandleAccepted(const std::shared_ptr goal_handle); + void TriggeredCalibrationExecute(const std::shared_ptr goal_handle); + tf2::Quaternion rotationMatrixToQuaternion(const float rotation[9]) const; void append_static_tf_msg(const rclcpp::Time& t, const float3& trans, @@ -169,7 +198,7 @@ namespace realsense2_camera class CimuData { public: - CimuData() : m_time_ns(-1) {}; + CimuData() : m_data({0,0,0}), m_time_ns(-1) {}; CimuData(const stream_index_pair type, Eigen::Vector3d data, double time): m_type(type), m_data(data), @@ -256,11 +285,20 @@ namespace realsense2_camera void setAvailableSensors(); void setCallbackFunctions(); void updateSensors(); + void startUpdatedSensors(); + void stopRequiredSensors(); void publishServices(); + void publishActions(); void startPublishers(const std::vector& profiles, const RosSensor& sensor); void startRGBDPublisherIfNeeded(); void stopPublishers(const std::vector& profiles); +#if defined (ACCELERATE_GPU_WITH_GLSL) + void initOpenGLProcessing(bool use_gpu_processing); + void shutdownOpenGLProcessing(); + void glfwPollEventCallback(); +#endif + rs2::device _dev; std::map _sensors; std::map> _sensors_callback; @@ -275,10 +313,10 @@ namespace realsense2_camera double _publish_fps; double _color_fps; + double _color_freq_tolerance; unsigned int _frame_counter = 1; unsigned int _frames_to_skip = 0; - std::map _stream_intrinsics; std::map _enable; bool _publish_tf; double _tf_publish_rate, _diagnostics_period; @@ -347,6 +385,12 @@ namespace realsense2_camera std::shared_ptr _diagnostics_updater; rs2::stream_profile _base_profile; +#if defined (ACCELERATE_GPU_WITH_GLSL) + GLwindow _app; + rclcpp::TimerBase::SharedPtr _timer; + bool _accelerate_gpu_with_glsl; + bool 
_is_accelerate_gpu_with_glsl_changed; +#endif };//end class } diff --git a/realsense2_camera/include/constants.h b/realsense2_camera/include/constants.h index 0f99fee585..c8904a14eb 100644 --- a/realsense2_camera/include/constants.h +++ b/realsense2_camera/include/constants.h @@ -18,7 +18,7 @@ #include #define REALSENSE_ROS_MAJOR_VERSION 4 -#define REALSENSE_ROS_MINOR_VERSION 54 +#define REALSENSE_ROS_MINOR_VERSION 55 #define REALSENSE_ROS_PATCH_VERSION 1 #define STRINGIFY(arg) #arg @@ -53,6 +53,7 @@ namespace realsense2_camera const uint16_t RS430_MM_PID = 0x0ad5; // AWGT const uint16_t RS_USB2_PID = 0x0ad6; // USB2 const uint16_t RS420_PID = 0x0af6; // PWG + const uint16_t RS421_PID = 0x1155; // D421 const uint16_t RS420_MM_PID = 0x0afe; // PWGT const uint16_t RS410_MM_PID = 0x0aff; // ASR const uint16_t RS400_MM_PID = 0x0b00; // PSR @@ -60,12 +61,12 @@ namespace realsense2_camera const uint16_t RS460_PID = 0x0b03; // DS5U const uint16_t RS435_RGB_PID = 0x0b07; // AWGC const uint16_t RS435i_RGB_PID = 0x0B3A; // AWGC_MM - const uint16_t RS465_PID = 0x0b4d; // D465 const uint16_t RS416_RGB_PID = 0x0B52; // F416 RGB const uint16_t RS430i_PID = 0x0b4b; // D430i const uint16_t RS405_PID = 0x0B5B; // DS5U const uint16_t RS455_PID = 0x0B5C; // D455 - const uint16_t RS457_PID = 0xABCD; // D457 + const uint16_t RS457_PID = 0xABCD; // D457 + const uint16_t RS555_PID = 0x0B56; // D555 const bool ALLOW_NO_TEXTURE_POINTS = false; const bool ORDERED_PC = false; diff --git a/realsense2_camera/include/dynamic_params.h b/realsense2_camera/include/dynamic_params.h index b87caea5c0..d438ad937b 100644 --- a/realsense2_camera/include/dynamic_params.h +++ b/realsense2_camera/include/dynamic_params.h @@ -46,7 +46,10 @@ namespace realsense2_camera template void queueSetRosValue(const std::string& param_name, const T value); - + + template + T getParam(std::string param_name); + private: void monitor_update_functions(); diff --git a/realsense2_camera/include/gl_window.h b/realsense2_camera/include/gl_window.h new file mode 100644 index 0000000000..2bca9d4eb5 --- /dev/null +++ b/realsense2_camera/include/gl_window.h @@ -0,0 +1,84 @@ +// Copyright 2023 Intel Corporation. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#pragma once + +#include // Include RealSense Cross Platform API + +#if defined (ACCELERATE_GPU_WITH_GLSL) + +#define GL_SILENCE_DEPRECATION +#define GLFW_INCLUDE_GLU +#include +#include +#include + +#include // Include GPU-Processing API + + +#ifndef PI +#define PI 3.14159265358979323846 +#define PI_FL 3.141592f +#endif + + +class GLwindow +{ +public: + + GLwindow(int width, int height, const char* title) + : _width(width), _height(height) + { + glfwInit(); + glfwWindowHint(GLFW_VISIBLE, 0); + win = glfwCreateWindow(width, height, title, nullptr, nullptr); + if (!win) + throw std::runtime_error("Could not open OpenGL window, please check your graphic drivers or use the textual SDK tools"); + glfwMakeContextCurrent(win); + + glfwSetWindowUserPointer(win, this); + + } + + ~GLwindow() + { + glfwDestroyWindow(win); + glfwTerminate(); + } + + void close() + { + glfwSetWindowShouldClose(win, 1); + } + + float width() const { return float(_width); } + float height() const { return float(_height); } + + operator bool() + { + auto res = !glfwWindowShouldClose(win); + + glfwPollEvents(); + + return res; + } + + operator GLFWwindow* () { return win; } + +private: + GLFWwindow* win; + int _width, _height; +}; + +#endif diff --git a/realsense2_camera/include/profile_manager.h b/realsense2_camera/include/profile_manager.h index 3021e95f6c..736c440f39 100644 --- a/realsense2_camera/include/profile_manager.h +++ b/realsense2_camera/include/profile_manager.h @@ -68,23 +68,22 @@ namespace realsense2_camera VideoProfilesManager(std::shared_ptr parameters, const std::string& module_name, rclcpp::Logger logger, bool force_image_default_qos = false); bool isWantedProfile(const rs2::stream_profile& profile) override; void registerProfileParameters(std::vector all_profiles, std::function update_sensor_func) override; - int getHeight() {return _height;}; - int getWidth() {return _width;}; - int getFPS() {return _fps;}; + int getHeight(rs2_stream stream_type) {return _height[stream_type];}; + int getWidth(rs2_stream stream_type) {return _width[stream_type];}; + int getFPS(rs2_stream stream_type) {return _fps[stream_type];}; private: bool isSameProfileValues(const rs2::stream_profile& profile, const int width, const int height, const int fps, const rs2_format format); + rs2::stream_profile validateAndGetSuitableProfile(rs2_stream stream_type, rs2::stream_profile given_profile); void registerVideoSensorProfileFormat(stream_index_pair sip); void registerVideoSensorParams(std::set sips); - std::string get_profiles_descriptions(); + std::string get_profiles_descriptions(rs2_stream stream_type); std::string getProfileFormatsDescriptions(stream_index_pair sip); private: std::string _module_name; std::map _formats; - int _fps; - int _width, _height; - bool _is_profile_exist; + std::map _fps, _width, _height; bool _force_image_default_qos; }; diff --git a/realsense2_camera/include/ros_sensor.h b/realsense2_camera/include/ros_sensor.h index 57d224300e..f18bea7268 100644 --- a/realsense2_camera/include/ros_sensor.h +++ b/realsense2_camera/include/ros_sensor.h @@ -32,10 +32,10 @@ namespace realsense2_camera class FrequencyDiagnostics { public: - FrequencyDiagnostics(std::string name, int expected_frequency, std::shared_ptr updater): + FrequencyDiagnostics(std::string name, int expected_frequency, double color_freq_tolerance, std::shared_ptr updater): _name(name), - _min_freq(expected_frequency), _max_freq(expected_frequency), - _freq_status_param(&_min_freq, &_max_freq, 0.1, 10), + _min_freq(expected_frequency), 
_max_freq(expected_frequency),_color_freq_tolerance(color_freq_tolerance), + _freq_status_param(&_min_freq, &_max_freq, _color_freq_tolerance, 10), _freq_status(_freq_status_param, _name), _p_updater(updater) { @@ -46,7 +46,8 @@ namespace realsense2_camera _name(other._name), _min_freq(other._min_freq), _max_freq(other._max_freq), - _freq_status_param(&_min_freq, &_max_freq, 0.1, 10), + _color_freq_tolerance(other._color_freq_tolerance), + _freq_status_param(&_min_freq, &_max_freq, _color_freq_tolerance, 10), _freq_status(_freq_status_param, _name), _p_updater(other._p_updater) { @@ -64,7 +65,7 @@ namespace realsense2_camera private: std::string _name; - double _min_freq, _max_freq; + double _min_freq, _max_freq, _color_freq_tolerance; diagnostic_updater::FrequencyStatusParam _freq_status_param; diagnostic_updater::FrequencyStatus _freq_status; std::shared_ptr _p_updater; @@ -80,6 +81,7 @@ namespace realsense2_camera std::function update_sensor_func, std::function hardware_reset_func, std::shared_ptr diagnostics_updater, + double color_freq_tolerance, rclcpp::Logger logger, bool force_image_default_qos = false, bool is_rosbag_file = false); @@ -120,6 +122,7 @@ namespace realsense2_camera rs2::region_of_interest _auto_exposure_roi; std::vector _parameters_names; std::shared_ptr _diagnostics_updater; + double _color_freq_tolerance; std::map _frequency_diagnostics; bool _force_image_default_qos; }; diff --git a/realsense2_camera/include/ros_utils.h b/realsense2_camera/include/ros_utils.h index feccd4647d..6980d2bf32 100644 --- a/realsense2_camera/include/ros_utils.h +++ b/realsense2_camera/include/ros_utils.h @@ -33,6 +33,7 @@ namespace realsense2_camera const stream_index_pair INFRA2{RS2_STREAM_INFRARED, 2}; const stream_index_pair GYRO{RS2_STREAM_GYRO, 0}; const stream_index_pair ACCEL{RS2_STREAM_ACCEL, 0}; + const stream_index_pair MOTION{RS2_STREAM_MOTION, 0}; bool isValidCharInName(char c); @@ -42,6 +43,6 @@ namespace realsense2_camera const rmw_qos_profile_t qos_string_to_qos(std::string str); const std::string list_available_qos_strings(); rs2_format string_to_rs2_format(std::string str); - + std::string vectorToJsonString(const std::vector& vec); } diff --git a/realsense2_camera/launch/rs_launch.py b/realsense2_camera/launch/rs_launch.py index 7430febf85..aa262693df 100644 --- a/realsense2_camera/launch/rs_launch.py +++ b/realsense2_camera/launch/rs_launch.py @@ -29,22 +29,25 @@ {'name': 'config_file', 'default': "''", 'description': 'yaml config file'}, {'name': 'json_file_path', 'default': "''", 'description': 'allows advanced configuration'}, {'name': 'initial_reset', 'default': 'false', 'description': "''"}, + {'name': 'accelerate_gpu_with_glsl', 'default': "false", 'description': 'enable GPU acceleration with GLSL'}, {'name': 'rosbag_filename', 'default': "''", 'description': 'A realsense bagfile to run from as a device'}, {'name': 'log_level', 'default': 'info', 'description': 'debug log level [DEBUG|INFO|WARN|ERROR|FATAL]'}, {'name': 'output', 'default': 'screen', 'description': 'pipe node output [screen|log]'}, {'name': 'enable_color', 'default': 'true', 'description': 'enable color stream'}, - {'name': 'rgb_camera.profile', 'default': '0,0,0', 'description': 'color image width'}, + {'name': 'rgb_camera.color_profile', 'default': '0,0,0', 'description': 'color stream profile'}, {'name': 'rgb_camera.color_format', 'default': 'RGB8', 'description': 'color stream format'}, {'name': 'rgb_camera.enable_auto_exposure', 'default': 'true', 'description': 'enable/disable auto exposure 
for color image'}, {'name': 'enable_depth', 'default': 'true', 'description': 'enable depth stream'}, {'name': 'enable_infra', 'default': 'false', 'description': 'enable infra0 stream'}, {'name': 'enable_infra1', 'default': 'false', 'description': 'enable infra1 stream'}, {'name': 'enable_infra2', 'default': 'false', 'description': 'enable infra2 stream'}, - {'name': 'depth_module.profile', 'default': '0,0,0', 'description': 'depth module profile'}, + {'name': 'depth_module.depth_profile', 'default': '0,0,0', 'description': 'depth stream profile'}, {'name': 'depth_module.depth_format', 'default': 'Z16', 'description': 'depth stream format'}, + {'name': 'depth_module.infra_profile', 'default': '0,0,0', 'description': 'infra streams (0/1/2) profile'}, {'name': 'depth_module.infra_format', 'default': 'RGB8', 'description': 'infra0 stream format'}, {'name': 'depth_module.infra1_format', 'default': 'Y8', 'description': 'infra1 stream format'}, {'name': 'depth_module.infra2_format', 'default': 'Y8', 'description': 'infra2 stream format'}, + {'name': 'depth_module.color_profile', 'default': '0,0,0', 'description': 'Depth module color stream profile'}, {'name': 'depth_module.exposure', 'default': '8500', 'description': 'Depth module manual exposure value'}, {'name': 'depth_module.gain', 'default': '16', 'description': 'Depth module manual gain value'}, {'name': 'depth_module.hdr_enabled', 'default': 'false', 'description': 'Depth module hdr enablement flag. Used for hdr_merge filter'}, @@ -54,10 +57,12 @@ {'name': 'depth_module.exposure.2', 'default': '1', 'description': 'Depth module second exposure value. Used for hdr_merge filter'}, {'name': 'depth_module.gain.2', 'default': '16', 'description': 'Depth module second gain value. Used for hdr_merge filter'}, {'name': 'enable_sync', 'default': 'false', 'description': "'enable sync mode'"}, + {'name': 'depth_module.inter_cam_sync_mode', 'default': "0", 'description': '[0-Default, 1-Master, 2-Slave]'}, {'name': 'enable_rgbd', 'default': 'false', 'description': "'enable rgbd topic'"}, {'name': 'enable_gyro', 'default': 'false', 'description': "'enable gyro stream'"}, {'name': 'enable_accel', 'default': 'false', 'description': "'enable accel stream'"}, {'name': 'gyro_fps', 'default': '0', 'description': "''"}, + {'name': 'enable_motion', 'default': 'false', 'description': "'enable motion stream (IMU) for DDS devices'"}, {'name': 'accel_fps', 'default': '0', 'description': "''"}, {'name': 'unite_imu_method', 'default': "0", 'description': '[0-None, 1-copy, 2-linear_interpolation]'}, {'name': 'clip_distance', 'default': '-2.', 'description': "''"}, @@ -81,7 +86,7 @@ {'name': 'hdr_merge.enable', 'default': 'false', 'description': 'hdr_merge filter enablement flag'}, {'name': 'wait_for_device_timeout', 'default': '-1.', 'description': 'Timeout for waiting for device to connect (Seconds)'}, {'name': 'reconnect_timeout', 'default': '6.', 'description': 'Timeout(seconds) between consequtive reconnection attempts'}, - {'name': 'publish_fps', 'default': '2.', 'description': 'Frame publishing frequency'}, + {'name': 'color_freq_tolerance', 'default': '0.1', 'description': 'Tolerance with which bounds must be satisfied'}, ] def declare_configurable_parameters(parameters): @@ -97,6 +102,14 @@ def yaml_to_dict(path_to_yaml): def launch_setup(context, params, param_name_suffix=''): _config_file = LaunchConfiguration('config_file' + param_name_suffix).perform(context) params_from_file = {} if _config_file == "''" else yaml_to_dict(_config_file) + + _output = 
LaunchConfiguration('output' + param_name_suffix) + if(os.getenv('ROS_DISTRO') == 'foxy'): + # Foxy doesn't support output as substitution object (LaunchConfiguration object) + # but supports it as string, so we fetch the string from this substitution object + # see related PR that was merged for humble, iron, rolling: https://github.com/ros2/launch/pull/577 + _output = context.perform_substitution(_output) + return [ launch_ros.actions.Node( package='realsense2_camera', @@ -104,7 +117,7 @@ def launch_setup(context, params, param_name_suffix=''): name=LaunchConfiguration('camera_name' + param_name_suffix), executable='realsense2_camera_node', parameters=[params, params_from_file], - output=LaunchConfiguration('output' + param_name_suffix), + output=_output, arguments=['--ros-args', '--log-level', LaunchConfiguration('log_level' + param_name_suffix)], emulate_tty=True, respawn=True, diff --git a/realsense2_camera/launch/rs_multi_camera_launch_sync.py b/realsense2_camera/launch/rs_multi_camera_launch_sync.py new file mode 100644 index 0000000000..de127d35fb --- /dev/null +++ b/realsense2_camera/launch/rs_multi_camera_launch_sync.py @@ -0,0 +1,83 @@ +# Copyright 2023 Intel Corporation. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# DESCRIPTION # +# ----------- # +# Use this launch file to launch 2 devices and enable the hardware synchronization. +# As describe in https://dev.intelrealsense.com/docs/multiple-depth-cameras-configuration both devices +# have to be connected using a sync cable. The devices will by default stream asynchronously. +# Using this launch file one device will operate as master and the other as slave. As a result they will +# capture at exactly the same time and rate. 
+# command line example: (to be adapted with the serial numbers or the used devices) +# ros2 launch realsense2_camera rs_multi_camera_launch_sync.py camera_name1:=cam_1 camera_name2:=cam_2 camera_namespace1:=camera camera_namespace2:=camera serial_no1:="'_031422250097'" serial_no2:="'_336222301921'" +# The value of the param can be checked using ros2 param get /camera/cam_1 depth_module.inter_cam_sync_mode and ros2 param get /camera/cam_2 depth_module.inter_cam_sync_mode + +"""Launch realsense2_camera node.""" +import copy +from launch import LaunchDescription, LaunchContext +import launch_ros.actions +from launch.actions import IncludeLaunchDescription, OpaqueFunction +from launch.substitutions import LaunchConfiguration, ThisLaunchFileDir +from launch.launch_description_sources import PythonLaunchDescriptionSource +import sys +import pathlib +sys.path.append(str(pathlib.Path(__file__).parent.absolute())) +import rs_launch + +local_parameters = [{'name': 'camera_name1', 'default': 'camera1', 'description': 'camera1 unique name'}, + {'name': 'camera_name2', 'default': 'camera2', 'description': 'camera2 unique name'}, + {'name': 'camera_namespace1', 'default': 'camera1', 'description': 'camera1 namespace'}, + {'name': 'camera_namespace2', 'default': 'camera2', 'description': 'camera2 namespace'}, + {'name': 'depth_module.inter_cam_sync_mode1', 'default': '1', 'description': 'master'}, + {'name': 'depth_module.inter_cam_sync_mode2', 'default': '2', 'description': 'slave'}, + ] + + +def set_configurable_parameters(local_params): + return dict([(param['original_name'], LaunchConfiguration(param['name'])) for param in local_params]) + +def duplicate_params(general_params, posix): + local_params = copy.deepcopy(general_params) + for param in local_params: + param['original_name'] = param['name'] + param['name'] += posix + return local_params + +def launch_static_transform_publisher_node(context : LaunchContext): + # dummy static transformation from camera1 to camera2 + node = launch_ros.actions.Node( + package = "tf2_ros", + executable = "static_transform_publisher", + arguments = ["0", "0", "0", "0", "0", "0", + context.launch_configurations['camera_name1'] + "_link", + context.launch_configurations['camera_name2'] + "_link"] + ) + return [node] + +def generate_launch_description(): + params1 = duplicate_params(rs_launch.configurable_parameters, '1') + params2 = duplicate_params(rs_launch.configurable_parameters, '2') + return LaunchDescription( + rs_launch.declare_configurable_parameters(local_parameters) + + rs_launch.declare_configurable_parameters(params1) + + rs_launch.declare_configurable_parameters(params2) + + [ + OpaqueFunction(function=rs_launch.launch_setup, + kwargs = {'params' : set_configurable_parameters(params1), + 'param_name_suffix': '1'}), + OpaqueFunction(function=rs_launch.launch_setup, + kwargs = {'params' : set_configurable_parameters(params2), + 'param_name_suffix': '2'}), + OpaqueFunction(function=launch_static_transform_publisher_node) + ]) diff --git a/realsense2_camera/package.xml b/realsense2_camera/package.xml index 6db46cff68..d4d53ae3eb 100644 --- a/realsense2_camera/package.xml +++ b/realsense2_camera/package.xml @@ -2,7 +2,7 @@ realsense2_camera - 4.54.1 + 4.55.1 RealSense camera package allowing access to Intel D400 3D cameras LibRealSense ROS Team Apache License 2.0 @@ -23,22 +23,23 @@ realsense2_camera_msgs sensor_msgs geometry_msgs + std_srvs std_msgs nav_msgs tf2 tf2_ros diagnostic_updater - ament_cmake_gtest - launch_testing - ament_cmake_pytest - 
launch_pytest - sensor_msgs_py - python3-numpy - python3-tqdm - sensor_msgs_py - python3-requests - tf2_ros_py - ros2topic + ament_cmake_gtest + launch_testing + ament_cmake_pytest + launch_pytest + sensor_msgs_py + python3-numpy + python3-tqdm + sensor_msgs_py + python3-requests + tf2_ros_py + ros2topic launch_ros ros_environment diff --git a/realsense2_camera/src/actions.cpp b/realsense2_camera/src/actions.cpp new file mode 100644 index 0000000000..4d65db97be --- /dev/null +++ b/realsense2_camera/src/actions.cpp @@ -0,0 +1,133 @@ +// Copyright 2024 Intel Corporation. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "../include/base_realsense_node.h" +using namespace realsense2_camera; +using namespace rs2; + +/*** + * Implementation of ROS2 Actions based on: + * ROS2 Actions Design: https://design.ros2.org/articles/actions.html + * ROS2 Actions Tutorials/Examples: https://docs.ros.org/en/rolling/Tutorials/Intermediate/Writing-an-Action-Server-Client/Cpp.html +*/ + +// Triggered Calibration Action Struct (Message) +/* +# request +string json "calib run" +--- +# result +string calibration +float32 health +--- +# feedback +float32 progress +*/ + +/*** + * A callback for handling new goals (requests) for Triggered Calibration + * This implementation just accepts all goals with no restriction on the json input +*/ +rclcpp_action::GoalResponse BaseRealSenseNode::TriggeredCalibrationHandleGoal(const rclcpp_action::GoalUUID & uuid, + std::shared_ptr goal) +{ + (void)uuid; // unused parameter + ROS_INFO_STREAM("TriggeredCalibrationAction: Received request with json " << goal->json); + return rclcpp_action::GoalResponse::ACCEPT_AND_EXECUTE; +} + +/*** + * A callback for handling cancel events + * This implementation just tells the client that it accepted the cancellation. +*/ +rclcpp_action::CancelResponse BaseRealSenseNode::TriggeredCalibrationHandleCancel(const std::shared_ptr goal_handle) +{ + (void)goal_handle; // unused parameter + ROS_INFO("TriggeredCalibrationAction: Received request to cancel"); + return rclcpp_action::CancelResponse::ACCEPT; +} + +/*** + * A callback that accepts a new goal (request) and starts processing it. + * Since the execution is a long-running operation, we spawn off a + * thread to do the actual work and return from handle_accepted quickly. +*/ +void BaseRealSenseNode::TriggeredCalibrationHandleAccepted(const std::shared_ptr goal_handle) +{ + using namespace std::placeholders; + ROS_INFO("TriggeredCalibrationAction: Request accepted"); + // this needs to return quickly to avoid blocking the executor, so spin up a new thread + std::thread{std::bind(&BaseRealSenseNode::TriggeredCalibrationExecute, this, _1), goal_handle}.detach(); +} + +/*** + * All processing and updates of Triggered Calibration operation + * are done in this execute method in the new thread called by the + * TriggeredCalibrationHandleAccepted() above. 
+*/ +void BaseRealSenseNode::TriggeredCalibrationExecute(const std::shared_ptr goal_handle) +{ + ROS_INFO("TriggeredCalibrationAction: Executing..."); + const auto goal = goal_handle->get_goal(); // get the TriggeredCalibration srv struct + auto feedback = std::make_shared(); + float & _progress = feedback->progress; + auto result = std::make_shared(); + + try + { + rs2::auto_calibrated_device ac_dev = _dev.as(); + float health = 0.f; // output health + int timeout_ms = 120000; // 2 minutes timout + + auto progress_callback = [&](const float progress) { + _progress = progress; + goal_handle->publish_feedback(feedback); + }; + + auto ans = ac_dev.run_on_chip_calibration(goal->json, + &health, + progress_callback, + timeout_ms); + + // the new calibration is the result without the first 3 bytes + rs2::calibration_table new_calib = std::vector(ans.begin() + 3, ans.end()); + + if (rclcpp::ok() && _progress == 100.0) + { + result->calibration = vectorToJsonString(new_calib); + result->health = health; + result->success = true; + goal_handle->succeed(result); + ROS_INFO("TriggeredCalibrationExecute: Succeded"); + } + else + { + result->calibration = "{}"; + result->success = false; + result->error_msg = "Canceled"; + goal_handle->canceled(result); + ROS_WARN("TriggeredCalibrationExecute: Canceled"); + } + } + catch(const std::runtime_error& e) + { + // exception must have been thrown from run_on_chip_calibration call + std::string error_msg = "TriggeredCalibrationExecute: Aborted. Error: " + std::string(e.what()); + result->calibration = "{}"; + result->success = false; + result->error_msg = error_msg; + goal_handle->abort(result); + ROS_ERROR(error_msg.c_str()); + } +} diff --git a/realsense2_camera/src/base_realsense_node.cpp b/realsense2_camera/src/base_realsense_node.cpp index f2f9594b21..adb758c0e9 100755 --- a/realsense2_camera/src/base_realsense_node.cpp +++ b/realsense2_camera/src/base_realsense_node.cpp @@ -34,7 +34,11 @@ SyncedImuPublisher::SyncedImuPublisher(rclcpp::Publisher: SyncedImuPublisher::~SyncedImuPublisher() { - PublishPendingMessages(); + try + { + PublishPendingMessages(); + } + catch(...){} // Not allowed to throw from Dtor } void SyncedImuPublisher::Publish(sensor_msgs::msg::Imu imu_msg) @@ -114,7 +118,13 @@ BaseRealSenseNode::BaseRealSenseNode(rclcpp::Node& node, _pointcloud(false), _imu_sync_method(imu_sync_method::NONE), _is_profile_changed(false), - _is_align_depth_changed(false) + _is_align_depth_changed(false), + _color_freq_tolerance(0.1) +#if defined (ACCELERATE_GPU_WITH_GLSL) + ,_app(1280, 720, "RS_GLFW_Window"), + _accelerate_gpu_with_glsl(false), + _is_accelerate_gpu_with_glsl_changed(false) +#endif { if ( use_intra_process ) { @@ -144,10 +154,14 @@ BaseRealSenseNode::~BaseRealSenseNode() _monitoring_pc->join(); } clearParameters(); - for(auto&& sensor : _available_ros_sensors) + try { - sensor->stop(); + for(auto&& sensor : _available_ros_sensors) + { + sensor->stop(); + } } + catch(...){} // Not allowed to throw from Dtor } void BaseRealSenseNode::hardwareResetRequest() @@ -229,14 +243,22 @@ void BaseRealSenseNode::setupFilters() _cv_mpc.notify_one(); }; - _colorizer_filter = std::make_shared(std::make_shared(), _parameters, _logger); - _filters.push_back(_colorizer_filter); - +#if defined (ACCELERATE_GPU_WITH_GLSL) + _colorizer_filter = std::make_shared(std::make_shared(), _parameters, _logger); + _pc_filter = std::make_shared(std::make_shared(), _node, _parameters, _logger); +#else + _colorizer_filter = std::make_shared(std::make_shared(), _parameters, 
_logger); _pc_filter = std::make_shared(std::make_shared(), _node, _parameters, _logger); +#endif + + // Apply PointCloud filter before applying Align-depth as it requires original depth image not aligned-depth image. _filters.push_back(_pc_filter); _align_depth_filter = std::make_shared(std::make_shared(RS2_STREAM_COLOR), update_align_depth_func, _parameters, _logger); _filters.push_back(_align_depth_filter); + + // Apply Colorizer filter after applying Align-Depth to get colorized aligned depth image. + _filters.push_back(_colorizer_filter); } cv::Mat& BaseRealSenseNode::fix_depth_scale(const cv::Mat& from_image, cv::Mat& to_image) @@ -459,7 +481,26 @@ void BaseRealSenseNode::imu_callback(rs2::frame frame) frame.get_profile().stream_index(), rs2_timestamp_domain_to_string(frame.get_frame_timestamp_domain())); - auto stream_index = (stream == GYRO.first)?GYRO:ACCEL; + stream_index_pair stream_index; + + if(stream == GYRO.first) + { + stream_index = GYRO; + } + else if(stream == ACCEL.first) + { + stream_index = ACCEL; + } + else if(stream == MOTION.first) + { + stream_index = MOTION; + } + else + { + ROS_ERROR("Unknown IMU stream type."); + return; + } + rclcpp::Time t(frameSystemTimeSec(frame)); if(_imu_publishers.find(stream_index) == _imu_publishers.end()) @@ -474,19 +515,41 @@ void BaseRealSenseNode::imu_callback(rs2::frame frame) ImuMessage_AddDefaultValues(imu_msg); imu_msg.header.frame_id = OPTICAL_FRAME_ID(stream_index); - auto crnt_reading = *(reinterpret_cast(frame.get_data())); - if (GYRO == stream_index) + if (MOTION == stream_index) { - imu_msg.angular_velocity.x = crnt_reading.x; - imu_msg.angular_velocity.y = crnt_reading.y; - imu_msg.angular_velocity.z = crnt_reading.z; + auto combined_motion_data = frame.as().get_combined_motion_data(); + + imu_msg.linear_acceleration.x = combined_motion_data.linear_acceleration.x; + imu_msg.linear_acceleration.y = combined_motion_data.linear_acceleration.y; + imu_msg.linear_acceleration.z = combined_motion_data.linear_acceleration.z; + + imu_msg.angular_velocity.x = combined_motion_data.angular_velocity.x; + imu_msg.angular_velocity.y = combined_motion_data.angular_velocity.y; + imu_msg.angular_velocity.z = combined_motion_data.angular_velocity.z; + + imu_msg.orientation.x = combined_motion_data.orientation.x; + imu_msg.orientation.y = combined_motion_data.orientation.y; + imu_msg.orientation.z = combined_motion_data.orientation.z; + imu_msg.orientation.w = combined_motion_data.orientation.w; + } - else if (ACCEL == stream_index) + else { - imu_msg.linear_acceleration.x = crnt_reading.x; - imu_msg.linear_acceleration.y = crnt_reading.y; - imu_msg.linear_acceleration.z = crnt_reading.z; + auto motion_data = frame.as().get_motion_data(); + if (GYRO == stream_index) + { + imu_msg.angular_velocity.x = motion_data.x; + imu_msg.angular_velocity.y = motion_data.y; + imu_msg.angular_velocity.z = motion_data.z; + } + else // ACCEL == stream_index + { + imu_msg.linear_acceleration.x = motion_data.x; + imu_msg.linear_acceleration.y = motion_data.y; + imu_msg.linear_acceleration.z = motion_data.z; + } } + imu_msg.header.stamp = t; _imu_publishers[stream_index]->publish(imu_msg); ROS_DEBUG("Publish %s stream", ros_stream_to_string(frame.get_profile().stream_type()).c_str()); @@ -497,16 +560,12 @@ void BaseRealSenseNode::imu_callback(rs2::frame frame) void BaseRealSenseNode::frame_callback(rs2::frame frame) { - //Skip frames in order for fps to match the one specified in publish_fps parameter - if (_frame_counter < _frames_to_skip) + //we know that we 
have less depth frames than color frames, so we can skip some frames after a depth frame + if (_frame_counter < _frames_to_skip - 2) { _frame_counter++; return; } - else - { - _frame_counter = 1; - } if (_synced_imu_publisher) _synced_imu_publisher->Pause(); @@ -540,6 +599,10 @@ void BaseRealSenseNode::frame_callback(rs2::frame frame) } // Clip depth_frame for max range: rs2::depth_frame original_depth_frame = frameset.get_depth_frame(); + if(!original_depth_frame){ + // skip frames which don't have depth frames + return; + } if (original_depth_frame && _clipping_distance > 0) { clip_depth(original_depth_frame, _clipping_distance); @@ -625,6 +688,9 @@ void BaseRealSenseNode::frame_callback(rs2::frame frame) } if (_synced_imu_publisher) _synced_imu_publisher->Resume(); + + // found frame, reset counter + _frame_counter = 1; } // frame_callback void BaseRealSenseNode::multiple_message_callback(rs2::frame frame, imu_sync_method sync_method) @@ -644,7 +710,11 @@ void BaseRealSenseNode::multiple_message_callback(rs2::frame frame, imu_sync_met bool BaseRealSenseNode::setBaseTime(double frame_time, rs2_timestamp_domain time_domain) { - ROS_WARN_ONCE(time_domain == RS2_TIMESTAMP_DOMAIN_SYSTEM_TIME ? "Frame metadata isn't available! (frame_timestamp_domain = RS2_TIMESTAMP_DOMAIN_SYSTEM_TIME)" : ""); + if (time_domain == RS2_TIMESTAMP_DOMAIN_SYSTEM_TIME) + { + ROS_WARN_ONCE("Frame metadata isn't available! (frame_timestamp_domain = RS2_TIMESTAMP_DOMAIN_SYSTEM_TIME)"); + } + if (time_domain == RS2_TIMESTAMP_DOMAIN_HARDWARE_CLOCK) { ROS_WARN("frame's time domain is HARDWARE_CLOCK. Timestamps may reset periodically."); @@ -676,7 +746,14 @@ rclcpp::Time BaseRealSenseNode::frameSystemTimeSec(rs2::frame frame) { double elapsed_camera_ns = millisecondsToNanoseconds(timestamp_ms - _camera_time_base); + /* + Fixing deprecated-declarations compilation error for EOL distro (foxy) + */ +#if defined(FOXY) + auto duration = rclcpp::Duration(elapsed_camera_ns); +#else auto duration = rclcpp::Duration::from_nanoseconds(elapsed_camera_ns); +#endif return rclcpp::Time(_ros_time_base + duration); } @@ -709,8 +786,19 @@ void BaseRealSenseNode::updateProfilesStreamCalibData(const std::vector + T Parameters::getParam(std::string param_name) + { + return _node.get_parameter(param_name).get_value(); + } + template void Parameters::setParamT(std::string param_name, bool& param, std::function func, rcl_interfaces::msg::ParameterDescriptor descriptor); template void Parameters::setParamT(std::string param_name, int& param, std::function func, rcl_interfaces::msg::ParameterDescriptor descriptor); template void Parameters::setParamT(std::string param_name, double& param, std::function func, rcl_interfaces::msg::ParameterDescriptor descriptor); @@ -284,4 +294,6 @@ namespace realsense2_camera template void Parameters::queueSetRosValue(const std::string& param_name, const int value); template int Parameters::readAndDeleteParam(std::string param_name, const int& initial_value); + + template bool Parameters::getParam(std::string param_name); } diff --git a/realsense2_camera/src/gl_gpu_processing.cpp b/realsense2_camera/src/gl_gpu_processing.cpp new file mode 100644 index 0000000000..d6b8f883e2 --- /dev/null +++ b/realsense2_camera/src/gl_gpu_processing.cpp @@ -0,0 +1,47 @@ +// Copyright 2023 Intel Corporation. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "../include/base_realsense_node.h" +#include + +using namespace realsense2_camera; +using namespace std::chrono_literals; + +#if defined (ACCELERATE_GPU_WITH_GLSL) + +void BaseRealSenseNode::initOpenGLProcessing(bool use_gpu_processing) +{ + // Once we have a window, initialize GL module + // Pass our window to enable sharing of textures between processed frames and the window + // The "use_gpu_processing" is going to control if we will use CPU or GPU for data processing + rs2::gl::init_processing(_app, use_gpu_processing); + if (use_gpu_processing) + rs2::gl::init_rendering(); + + _timer = _node.create_wall_timer(1000ms, std::bind(&BaseRealSenseNode::glfwPollEventCallback, this)); +} + +void BaseRealSenseNode::glfwPollEventCallback() +{ + // Must poll the GLFW events perodically, else window will hang or crash + glfwPollEvents(); +} + +void BaseRealSenseNode::shutdownOpenGLProcessing() +{ + rs2::gl::shutdown_rendering(); + rs2::gl::shutdown_processing(); +} + +#endif diff --git a/realsense2_camera/src/parameters.cpp b/realsense2_camera/src/parameters.cpp index c19c8f5395..f38e11acc6 100644 --- a/realsense2_camera/src/parameters.cpp +++ b/realsense2_camera/src/parameters.cpp @@ -84,15 +84,36 @@ void BaseRealSenseNode::getParameters() _parameters_names.push_back(param_name); param_name = std::string("publish_fps"); - _publish_fps = _parameters->setParam(param_name, 2.0); + _publish_fps = _parameters->setParam(param_name, 6.0); _parameters_names.push_back(param_name); param_name = std::string("color_fps"); - _color_fps = _parameters->setParam(param_name, 6.0); + _color_fps = _parameters->setParam(param_name, 30.0); + _parameters_names.push_back(param_name); + + param_name = std::string("color_freq_tolerance"); + _color_freq_tolerance = _parameters->setParam(param_name, 0.1); _parameters_names.push_back(param_name); _frames_to_skip = _color_fps / _publish_fps; +#if defined (ACCELERATE_GPU_WITH_GLSL) + param_name = std::string("accelerate_gpu_with_glsl"); + _parameters->setParam(param_name, false, + [this](const rclcpp::Parameter& parameter) + { + bool temp_value = parameter.get_value(); + if (_accelerate_gpu_with_glsl != temp_value) + { + _accelerate_gpu_with_glsl = temp_value; + std::lock_guard lock_guard(_profile_changes_mutex); + _is_accelerate_gpu_with_glsl_changed = true; + } + _cv_mpc.notify_one(); + }); + _parameters_names.push_back(param_name); +#endif + } void BaseRealSenseNode::setDynamicParams() diff --git a/realsense2_camera/src/profile_manager.cpp b/realsense2_camera/src/profile_manager.cpp index 95dadbbad6..bbe8989532 100644 --- a/realsense2_camera/src/profile_manager.cpp +++ b/realsense2_camera/src/profile_manager.cpp @@ -39,7 +39,9 @@ std::string applyTemplateName(std::string template_name, stream_index_pair sip) const std::string stream_name(create_graph_resource_name(STREAM_NAME(sip))); char* param_name = new char[template_name.size() + stream_name.size()]; sprintf(param_name, template_name.c_str(), stream_name.c_str()); - return std::string(param_name); + std::string full_name(param_name); + delete [] param_name; + return full_name; } void 
ProfilesManager::registerSensorQOSParam(std::string template_name, @@ -139,6 +141,40 @@ std::map ProfilesManager::getDefaultProf return sip_default_profiles; } +rs2::stream_profile VideoProfilesManager::validateAndGetSuitableProfile(rs2_stream stream_type, rs2::stream_profile given_profile) +{ + rs2::stream_profile suitable_profile = given_profile; + bool is_given_profile_suitable = false; + + for (auto temp_profile : _all_profiles) + { + if (temp_profile.stream_type() == stream_type) + { + auto video_profile = given_profile.as(); + + if (isSameProfileValues(temp_profile, video_profile.width(), video_profile.height(), video_profile.fps(), RS2_FORMAT_ANY)) + { + is_given_profile_suitable = true; + break; + } + } + } + + // If given profile is not suitable, choose the first available profile for the given stream. + if (!is_given_profile_suitable) + { + for (auto temp_profile : _all_profiles) + { + if (temp_profile.stream_type() == stream_type) + { + suitable_profile = temp_profile; + } + } + } + + return suitable_profile; +} + void ProfilesManager::addWantedProfiles(std::vector& wanted_profiles) { std::map found_sips; @@ -241,7 +277,7 @@ bool VideoProfilesManager::isSameProfileValues(const rs2::stream_profile& profil bool VideoProfilesManager::isWantedProfile(const rs2::stream_profile& profile) { stream_index_pair sip = {profile.stream_type(), profile.stream_index()}; - return isSameProfileValues(profile, _width, _height, _fps, _formats[sip]); + return isSameProfileValues(profile, _width[sip.first], _height[sip.first], _fps[sip.first], _formats[sip]); } void VideoProfilesManager::registerProfileParameters(std::vector all_profiles, std::function update_sensor_func) @@ -270,15 +306,18 @@ void VideoProfilesManager::registerProfileParameters(std::vector } } -std::string VideoProfilesManager::get_profiles_descriptions() +std::string VideoProfilesManager::get_profiles_descriptions(rs2_stream stream_type) { std::set profiles_str; for (auto& profile : _all_profiles) { - auto video_profile = profile.as(); - std::stringstream crnt_profile_str; - crnt_profile_str << video_profile.width() << "x" << video_profile.height() << "x" << video_profile.fps(); - profiles_str.insert(crnt_profile_str.str()); + if (stream_type == profile.stream_type()) + { + auto video_profile = profile.as(); + std::stringstream crnt_profile_str; + crnt_profile_str << video_profile.width() << "x" << video_profile.height() << "x" << video_profile.fps(); + profiles_str.insert(crnt_profile_str.str()); + } } std::stringstream descriptors_strm; for (auto& profile_str : profiles_str) @@ -333,25 +372,52 @@ void VideoProfilesManager::registerVideoSensorProfileFormat(stream_index_pair si void VideoProfilesManager::registerVideoSensorParams(std::set sips) { - // Set default values: - std::map sip_default_profiles = getDefaultProfiles(); - - rs2::stream_profile default_profile = sip_default_profiles.begin()->second; + std::set available_streams; - if (sip_default_profiles.find(DEPTH) != sip_default_profiles.end()) - { - default_profile = sip_default_profiles[DEPTH]; - } - else if (sip_default_profiles.find(COLOR) != sip_default_profiles.end()) + for (auto sip : sips) { - default_profile = sip_default_profiles[COLOR]; + available_streams.insert(sip.first); } - auto video_profile = default_profile.as(); + // Set default values: + std::map sip_default_profiles = getDefaultProfiles(); - _width = video_profile.width(); - _height = video_profile.height(); - _fps = video_profile.fps(); + for (auto stream_type : available_streams) + { + 
rs2::stream_profile default_profile = sip_default_profiles.begin()->second; + switch (stream_type) + { + case RS2_STREAM_COLOR: + if (sip_default_profiles.find(COLOR) != sip_default_profiles.end()) + { + default_profile = sip_default_profiles[COLOR]; + } + break; + case RS2_STREAM_DEPTH: + if (sip_default_profiles.find(DEPTH) != sip_default_profiles.end()) + { + default_profile = sip_default_profiles[DEPTH]; + } + break; + case RS2_STREAM_INFRARED: + if (sip_default_profiles.find(INFRA1) != sip_default_profiles.end()) + { + default_profile = sip_default_profiles[INFRA1]; + } + else if (sip_default_profiles.find(DEPTH) != sip_default_profiles.end()) + { + default_profile = validateAndGetSuitableProfile(stream_type, sip_default_profiles[DEPTH]); + } + break; + default: + default_profile = validateAndGetSuitableProfile(stream_type, sip_default_profiles.begin()->second); + } + + auto video_profile = default_profile.as(); + _width[stream_type] = video_profile.width(); + _height[stream_type] = video_profile.height(); + _fps[stream_type] = video_profile.fps(); + } // Set the stream formats for (auto sip : sips) @@ -364,72 +430,76 @@ void VideoProfilesManager::registerVideoSensorParams(std::set { stream_index_pair sip = sip_default_profile.first; - default_profile = sip_default_profile.second; - video_profile = default_profile.as(); + auto default_profile = sip_default_profile.second; + auto video_profile = default_profile.as(); - if (isSameProfileValues(default_profile, _width, _height, _fps, video_profile.format())) + if (isSameProfileValues(default_profile, _width[sip.first], _height[sip.first], _fps[sip.first], video_profile.format())) { _formats[sip] = video_profile.format(); } } - // Register ROS parameter: - std::string param_name(_module_name + ".profile"); - rcl_interfaces::msg::ParameterDescriptor crnt_descriptor; - crnt_descriptor.description = "Available options are:\n" + get_profiles_descriptions(); - std::stringstream crnt_profile_str; - crnt_profile_str << _width << "x" << _height << "x" << _fps; - _params.getParameters()->setParam(param_name, crnt_profile_str.str(), [this](const rclcpp::Parameter& parameter) - { - std::regex self_regex("\\s*([0-9]+)\\s*[xX,]\\s*([0-9]+)\\s*[xX,]\\s*([0-9]+)\\s*", std::regex_constants::ECMAScript); - std::smatch match; - std::string profile_str(parameter.get_value()); - bool found = std::regex_match(profile_str, match, self_regex); - bool request_default(false); - if (found) + for (auto stream_type : available_streams) + { + // Register ROS parameter: + std::stringstream param_name_str; + param_name_str << _module_name << "." 
<< create_graph_resource_name(ros_stream_to_string(stream_type)) << "_profile"; + rcl_interfaces::msg::ParameterDescriptor crnt_descriptor; + crnt_descriptor.description = "Available options are:\n" + get_profiles_descriptions(stream_type); + std::stringstream crnt_profile_str; + crnt_profile_str << _width[stream_type] << "x" << _height[stream_type] << "x" << _fps[stream_type]; + _params.getParameters()->setParam(param_name_str.str(), crnt_profile_str.str(), [this, stream_type](const rclcpp::Parameter& parameter) { - int temp_width(std::stoi(match[1])), temp_height(std::stoi(match[2])), temp_fps(std::stoi(match[3])); - if (temp_width <= 0 || temp_height <= 0 || temp_fps <= 0) - { - found = false; - request_default = true; - } - else + std::regex self_regex("\\s*([0-9]+)\\s*[xX,]\\s*([0-9]+)\\s*[xX,]\\s*([0-9]+)\\s*", std::regex_constants::ECMAScript); + std::smatch match; + std::string profile_str(parameter.get_value()); + bool found = std::regex_match(profile_str, match, self_regex); + bool request_default(false); + if (found) { - for (const auto& profile : _all_profiles) + int temp_width(std::stoi(match[1])), temp_height(std::stoi(match[2])), temp_fps(std::stoi(match[3])); + if (temp_width <= 0 || temp_height <= 0 || temp_fps <= 0) { found = false; - if (isSameProfileValues(profile, temp_width, temp_height, temp_fps, RS2_FORMAT_ANY)) + request_default = true; + } + else + { + for (const auto& profile : _all_profiles) { - _width = temp_width; - _height = temp_height; - _fps = temp_fps; - found = true; - ROS_WARN_STREAM("re-enable the stream for the change to take effect."); - break; + found = false; + if (isSameProfileValues(profile, temp_width, temp_height, temp_fps, RS2_FORMAT_ANY)) + { + _width[stream_type] = temp_width; + _height[stream_type] = temp_height; + _fps[stream_type] = temp_fps; + found = true; + ROS_WARN_STREAM("re-enable the stream for the change to take effect."); + break; + } } } } - } - if (!found) - { - std::stringstream crnt_profile_str; - crnt_profile_str << _width << "x" << _height << "x" << _fps; - if (request_default) - { - ROS_INFO_STREAM("Set ROS param " << parameter.get_name() << " to default: " << crnt_profile_str.str()); - } - else + if (!found) { - ROS_ERROR_STREAM("Given value, " << parameter.get_value() << " is invalid. " - << "Run 'ros2 param describe " << parameter.get_name() - << "' to get the list of supported profiles. " - << "Setting ROS param back to: " << crnt_profile_str.str()); + std::stringstream crnt_profile_str; + crnt_profile_str << _width[stream_type] << "x" << _height[stream_type] << "x" << _fps[stream_type]; + if (request_default) + { + ROS_INFO_STREAM("Set ROS param " << parameter.get_name() << " to default: " << crnt_profile_str.str()); + } + else + { + ROS_ERROR_STREAM("Given value, " << parameter.get_value() << " is invalid. " + << "Run 'ros2 param describe " << parameter.get_name() + << "' to get the list of supported profiles. 
" + << "Setting ROS param back to: " << crnt_profile_str.str()); + } + _params.getParameters()->queueSetRosValue(parameter.get_name(), crnt_profile_str.str()); } - _params.getParameters()->queueSetRosValue(parameter.get_name(), crnt_profile_str.str()); - } - }, crnt_descriptor); - _parameters_names.push_back(param_name); + }, crnt_descriptor); + _parameters_names.push_back(param_name_str.str()); + } for (auto sip : sips) { diff --git a/realsense2_camera/src/realsense_node_factory.cpp b/realsense2_camera/src/realsense_node_factory.cpp index e53b459551..f80ed002e4 100644 --- a/realsense2_camera/src/realsense_node_factory.cpp +++ b/realsense2_camera/src/realsense_node_factory.cpp @@ -107,24 +107,31 @@ void RealSenseNodeFactory::getDevice(rs2::device_list list) std::vector results; ROS_INFO_STREAM("Device with name " << name << " was found."); std::string port_id = parseUsbPort(pn); - if (port_id.empty()) + + std::string pid_str(dev.get_info(RS2_CAMERA_INFO_PRODUCT_ID)); + if(pid_str != "DDS") { - std::stringstream msg; - msg << "Error extracting usb port from device with physical ID: " << pn << std::endl << "Please report on github issue at https://github.com/IntelRealSense/realsense-ros"; - if (_usb_port_id.empty()) + if (port_id.empty()) { - ROS_WARN_STREAM(msg.str()); + std::stringstream msg; + msg << "Error extracting usb port from device with physical ID: " << pn << std::endl + << "Please report on github issue at https://github.com/IntelRealSense/realsense-ros"; + if (_usb_port_id.empty()) + { + ROS_WARN_STREAM(msg.str()); + } + else + { + ROS_ERROR_STREAM(msg.str()); + ROS_ERROR_STREAM("Please use serial number instead of usb port."); + } } else { - ROS_ERROR_STREAM(msg.str()); - ROS_ERROR_STREAM("Please use serial number instead of usb port."); + ROS_INFO_STREAM("Device with port number " << port_id << " was found."); } } - else - { - ROS_INFO_STREAM("Device with port number " << port_id << " was found."); - } + bool found_device_type(true); if (!_device_type.empty()) { @@ -352,8 +359,20 @@ void RealSenseNodeFactory::init() void RealSenseNodeFactory::startDevice() { if (_realSenseNode) _realSenseNode.reset(); + std::string device_name(_device.get_info(RS2_CAMERA_INFO_NAME)); std::string pid_str(_device.get_info(RS2_CAMERA_INFO_PRODUCT_ID)); - uint16_t pid = std::stoi(pid_str, 0, 16); + uint16_t pid; + + if (device_name == "Intel RealSense D555") + { + // currently the PID of DDS devices is hardcoded as "DDS" + // need to be fixed in librealsense + pid = RS555_PID; + } + else + { + pid = std::stoi(pid_str, 0, 16); + } try { switch(pid) @@ -364,6 +383,7 @@ void RealSenseNodeFactory::startDevice() case RS460_PID: case RS415_PID: case RS420_PID: + case RS421_PID: case RS420_MM_PID: case RS430_PID: case RS430i_PID: @@ -373,7 +393,7 @@ void RealSenseNodeFactory::startDevice() case RS435i_RGB_PID: case RS455_PID: case RS457_PID: - case RS465_PID: + case RS555_PID: case RS_USB2_PID: _realSenseNode = std::unique_ptr(new BaseRealSenseNode(*this, _device, _parameters, this->get_node_options().use_intra_process_comms())); break; diff --git a/realsense2_camera/src/ros_sensor.cpp b/realsense2_camera/src/ros_sensor.cpp index dfd5daed99..ed39e09ce1 100644 --- a/realsense2_camera/src/ros_sensor.cpp +++ b/realsense2_camera/src/ros_sensor.cpp @@ -13,6 +13,7 @@ // limitations under the License. 
#include +#include using namespace realsense2_camera; using namespace rs2; @@ -43,6 +44,7 @@ RosSensor::RosSensor(rs2::sensor sensor, std::function update_sensor_func, std::function hardware_reset_func, std::shared_ptr diagnostics_updater, + double color_freq_tolerance, rclcpp::Logger logger, bool force_image_default_qos, bool is_rosbag_file): @@ -53,7 +55,8 @@ RosSensor::RosSensor(rs2::sensor sensor, _update_sensor_func(update_sensor_func), _hardware_reset_func(hardware_reset_func), _diagnostics_updater(diagnostics_updater), - _force_image_default_qos(force_image_default_qos) + _color_freq_tolerance(color_freq_tolerance), + _force_image_default_qos(force_image_default_qos) { _frame_callback = [this](rs2::frame frame) { @@ -78,8 +81,12 @@ RosSensor::RosSensor(rs2::sensor sensor, RosSensor::~RosSensor() { - clearParameters(); - stop(); + try + { + clearParameters(); + stop(); + } + catch(...){} // Not allowed to throw from Dtor } void RosSensor::setParameters(bool is_rosbag_file) @@ -110,8 +117,35 @@ void RosSensor::UpdateSequenceIdCallback() if (!supports(RS2_OPTION_SEQUENCE_ID)) return; + std::string module_name = create_graph_resource_name(rs2_to_ros(get_info(RS2_CAMERA_INFO_NAME))); + std::string param_name = module_name + "." + create_graph_resource_name(rs2_option_to_string(RS2_OPTION_ENABLE_AUTO_EXPOSURE)); + + bool user_set_enable_ae_value = _params.getParameters()->getParam(param_name); bool is_hdr_enabled = static_cast(get_option(RS2_OPTION_HDR_ENABLED)); + if (is_hdr_enabled && user_set_enable_ae_value) + { + bool is_ae_enabled = static_cast(get_option(RS2_OPTION_ENABLE_AUTO_EXPOSURE)); + + // If AE got auto-disabled, update the Enable_Auto_Exposure ROS paramerter as well accordingly. + if (!is_ae_enabled) + { + ROS_WARN_STREAM("Auto Exposure functionality is not supported when HDR is enabled. " << + "So, disabling the '" << param_name << "' param."); + + try + { + std::vector > funcs; + funcs.push_back([this](){set_sensor_parameter_to_ros(RS2_OPTION_ENABLE_AUTO_EXPOSURE);}); + _params.getParameters()->pushUpdateFunctions(funcs); + } + catch(const std::exception& e) + { + ROS_WARN_STREAM("Failed to set parameter:" << param_name << " : " << e.what()); + } + } + } + // Deleter to revert back the RS2_OPTION_HDR_ENABLED value at the end. auto deleter_to_revert_hdr = std::unique_ptr>(&is_hdr_enabled, [&](bool* enable_back_hdr) { @@ -121,36 +155,35 @@ void RosSensor::UpdateSequenceIdCallback() } }); - // From FW version 5.14.x.x, if HDR is enabled, updating UVC controls like exposure, gain , etc are restricted. - // So, disable it before updating. if (is_hdr_enabled) { + // From FW version 5.14.x.x, if HDR is enabled, updating UVC controls like exposure, gain , etc are restricted. + // So, disable it before updating. It will be reverted back by the deleter 'deleter_to_revert_hdr'. set_option(RS2_OPTION_HDR_ENABLED, false); - } - int original_seq_id = static_cast(get_option(RS2_OPTION_SEQUENCE_ID)); // To Set back to default. - std::string module_name = create_graph_resource_name(rs2_to_ros(get_info(RS2_CAMERA_INFO_NAME))); - - // Read initialization parameters and set to sensor: - std::vector options{RS2_OPTION_EXPOSURE, RS2_OPTION_GAIN}; - unsigned int seq_size = get_option(RS2_OPTION_SEQUENCE_SIZE); - for (unsigned int seq_id = 1; seq_id <= seq_size; seq_id++ ) - { - set_option(RS2_OPTION_SEQUENCE_ID, seq_id); - for (rs2_option& option : options) + int original_seq_id = static_cast(get_option(RS2_OPTION_SEQUENCE_ID)); // To Set back to default. 
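// Illustration (not part of the patch): "deleter_to_revert_hdr" above is a
// scope guard -- a std::unique_ptr whose custom deleter runs when the callback
// returns, even via an early return or an exception, and re-enables HDR if it
// had been on.  A minimal equivalent of the idiom, reusing the same
// set_option() call as the surrounding code (requires <memory> and <functional>):
//
//     bool hdr_was_enabled = true;
//     auto revert_hdr = std::unique_ptr<bool, std::function<void(bool*)>>(
//         &hdr_was_enabled,
//         [&](bool* was_on) { if (*was_on) set_option(RS2_OPTION_HDR_ENABLED, true); });
//     if (hdr_was_enabled)
//         set_option(RS2_OPTION_HDR_ENABLED, false);  // safe to touch exposure/gain now
//     // ... HDR is restored automatically when revert_hdr goes out of scope.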
+ + // Read initialization parameters and set to sensor: + std::vector options{RS2_OPTION_EXPOSURE, RS2_OPTION_GAIN}; + unsigned int seq_size = get_option(RS2_OPTION_SEQUENCE_SIZE); + for (unsigned int seq_id = 1; seq_id <= seq_size; seq_id++ ) { - std::stringstream param_name_str; - param_name_str << module_name << "." << create_graph_resource_name(rs2_option_to_string(option)) << "." << seq_id; - int option_value = get_option(option); - int user_set_option_value = _params.getParameters()->readAndDeleteParam(param_name_str.str(), option_value); - if (option_value != user_set_option_value) + set_option(RS2_OPTION_SEQUENCE_ID, seq_id); + for (rs2_option& option : options) { - ROS_INFO_STREAM("Set " << rs2_option_to_string(option) << "." << seq_id << " to " << user_set_option_value); - set_option(option, user_set_option_value); + std::stringstream param_name_str; + param_name_str << module_name << "." << create_graph_resource_name(rs2_option_to_string(option)) << "." << seq_id; + int option_value = get_option(option); + int user_set_option_value = _params.getParameters()->readAndDeleteParam(param_name_str.str(), option_value); + if (option_value != user_set_option_value) + { + ROS_INFO_STREAM("Set " << rs2_option_to_string(option) << "." << seq_id << " to " << user_set_option_value); + set_option(option, user_set_option_value); + } } } + set_option(RS2_OPTION_SEQUENCE_ID, original_seq_id); // Set back to default. } - set_option(RS2_OPTION_SEQUENCE_ID, original_seq_id); // Set back to default. // Set callback to update ros parameters to gain and exposure matching the selected sequence_id: const std::string option_name(module_name + "." + create_graph_resource_name(rs2_option_to_string(RS2_OPTION_SEQUENCE_ID))); @@ -255,7 +288,7 @@ bool RosSensor::start(const std::vector& profiles) { stream_index_pair sip(profile.stream_type(), profile.stream_index()); if (_diagnostics_updater) - _frequency_diagnostics.emplace(sip, FrequencyDiagnostics(STREAM_NAME(sip), profile.fps(), _diagnostics_updater)); + _frequency_diagnostics.emplace(sip, FrequencyDiagnostics(STREAM_NAME(sip), profile.fps(), _color_freq_tolerance, _diagnostics_updater)); } return true; } @@ -379,8 +412,19 @@ void RosSensor::set_sensor_auto_exposure_roi() { try { - int width = std::dynamic_pointer_cast(_profile_managers[0])->getWidth(); - int height = std::dynamic_pointer_cast(_profile_managers[0])->getHeight(); + rs2_stream stream_type = RS2_STREAM_ANY; + if (this->rs2::sensor::is()) + { + stream_type = RS2_STREAM_DEPTH; + } + else if (this->rs2::sensor::is()) + { + stream_type = RS2_STREAM_COLOR; + } + assert(stream_type != RS2_STREAM_ANY); + + int width = std::dynamic_pointer_cast(_profile_managers[0])->getWidth(stream_type); + int height = std::dynamic_pointer_cast(_profile_managers[0])->getHeight(stream_type); bool update_roi_range(false); if (_auto_exposure_roi.max_x > width) @@ -411,8 +455,19 @@ void RosSensor::registerAutoExposureROIOptions() if (this->rs2::sensor::is()) { - int width = std::dynamic_pointer_cast(_profile_managers[0])->getWidth(); - int height = std::dynamic_pointer_cast(_profile_managers[0])->getHeight(); + rs2_stream stream_type = RS2_STREAM_ANY; + if (this->rs2::sensor::is()) + { + stream_type = RS2_STREAM_DEPTH; + } + else if (this->rs2::sensor::is()) + { + stream_type = RS2_STREAM_COLOR; + } + assert(stream_type != RS2_STREAM_ANY); + + int width = std::dynamic_pointer_cast(_profile_managers[0])->getWidth(stream_type); + int height = std::dynamic_pointer_cast(_profile_managers[0])->getHeight(stream_type); int 
max_x(width-1); int max_y(height-1); diff --git a/realsense2_camera/src/ros_utils.cpp b/realsense2_camera/src/ros_utils.cpp index 0ee2172904..8c4d1528c1 100644 --- a/realsense2_camera/src/ros_utils.cpp +++ b/realsense2_camera/src/ros_utils.cpp @@ -141,4 +141,17 @@ const std::string list_available_qos_strings() return res.str(); } +std::string vectorToJsonString(const std::vector& vec) { + std::ostringstream oss; + oss << "["; + for (size_t i = 0; i < vec.size(); ++i) { + oss << static_cast(vec[i]); + if (i < vec.size() - 1) { + oss << ","; + } + } + oss << "]"; + return oss.str(); +} + } diff --git a/realsense2_camera/src/rs_node_setup.cpp b/realsense2_camera/src/rs_node_setup.cpp index d98157f6d9..61555a7b5e 100755 --- a/realsense2_camera/src/rs_node_setup.cpp +++ b/realsense2_camera/src/rs_node_setup.cpp @@ -22,6 +22,10 @@ using namespace rs2; void BaseRealSenseNode::setup() { +#if defined (ACCELERATE_GPU_WITH_GLSL) + initOpenGLProcessing(_accelerate_gpu_with_glsl); + _is_accelerate_gpu_with_glsl_changed = false; +#endif setDynamicParams(); startDiagnosticsUpdater(); setAvailableSensors(); @@ -31,6 +35,7 @@ void BaseRealSenseNode::setup() monitoringProfileChanges(); updateSensors(); publishServices(); + publishActions(); } void BaseRealSenseNode::monitoringProfileChanges() @@ -39,8 +44,20 @@ void BaseRealSenseNode::monitoringProfileChanges() std::function func = [this, time_interval](){ std::unique_lock lock(_profile_changes_mutex); while(_is_running) { - _cv_mpc.wait_for(lock, std::chrono::milliseconds(time_interval), [&]{return (!_is_running || _is_profile_changed || _is_align_depth_changed);}); - if (_is_running && (_is_profile_changed || _is_align_depth_changed)) + _cv_mpc.wait_for(lock, std::chrono::milliseconds(time_interval), + [&]{return (!_is_running || _is_profile_changed + || _is_align_depth_changed + #if defined (ACCELERATE_GPU_WITH_GLSL) + || _is_accelerate_gpu_with_glsl_changed + #endif + );}); + + if (_is_running && (_is_profile_changed + || _is_align_depth_changed + #if defined (ACCELERATE_GPU_WITH_GLSL) + || _is_accelerate_gpu_with_glsl_changed + #endif + )) { ROS_DEBUG("Profile has changed"); try @@ -53,6 +70,10 @@ void BaseRealSenseNode::monitoringProfileChanges() } _is_profile_changed = false; _is_align_depth_changed = false; + + #if defined (ACCELERATE_GPU_WITH_GLSL) + _is_accelerate_gpu_with_glsl_changed = false; + #endif } } }; @@ -139,12 +160,12 @@ void BaseRealSenseNode::setAvailableSensors() sensor.is()) { ROS_DEBUG_STREAM("Set " << module_name << " as VideoSensor."); - rosSensor = std::make_unique(sensor, _parameters, frame_callback_function, update_sensor_func, hardware_reset_func, _diagnostics_updater, _logger, _use_intra_process, _dev.is()); + rosSensor = std::make_unique(sensor, _parameters, frame_callback_function, update_sensor_func, hardware_reset_func, _diagnostics_updater, _color_freq_tolerance, _logger, _use_intra_process, _dev.is()); } else if (sensor.is()) { ROS_DEBUG_STREAM("Set " << module_name << " as ImuSensor."); - rosSensor = std::make_unique(sensor, _parameters, imu_callback_function, update_sensor_func, hardware_reset_func, _diagnostics_updater, _logger, false, _dev.is()); + rosSensor = std::make_unique(sensor, _parameters, imu_callback_function, update_sensor_func, hardware_reset_func, _diagnostics_updater, _color_freq_tolerance, _logger, false, _dev.is()); } else { @@ -274,8 +295,14 @@ void BaseRealSenseNode::startPublishers(const std::vector& profi rclcpp::QoS(rclcpp::QoSInitialization::from_rmw(qos), qos)); // Publish Intrinsics: 
info_topic_name << "~/" << stream_name << "/imu_info"; + + // IMU Info will have latched QoS, and it will publish its data only once during the ROS Node lifetime. + // intra-process do not support latched QoS, so we need to disable intra-process for this topic + rclcpp::PublisherOptionsWithAllocator> options; + options.use_intra_process_comm = rclcpp::IntraProcessSetting::Disable; _imu_info_publishers[sip] = _node.create_publisher(info_topic_name.str(), - rclcpp::QoS(rclcpp::QoSInitialization::from_rmw(info_qos), info_qos)); + rclcpp::QoS(rclcpp::QoSInitialization::from_rmw(rmw_qos_profile_latched), rmw_qos_profile_latched), + std::move(options)); IMUInfo info_msg = getImuInfo(profile); _imu_info_publishers[sip]->publish(info_msg); } @@ -330,6 +357,34 @@ void BaseRealSenseNode::startRGBDPublisherIfNeeded() void BaseRealSenseNode::updateSensors() { std::lock_guard lock_guard(_update_sensor_mutex); + try{ + stopRequiredSensors(); + + #if defined (ACCELERATE_GPU_WITH_GLSL) + if (_is_accelerate_gpu_with_glsl_changed) + { + shutdownOpenGLProcessing(); + + initOpenGLProcessing(_accelerate_gpu_with_glsl); + } + #endif + + startUpdatedSensors(); + } + catch(const std::exception& ex) + { + ROS_ERROR_STREAM(__FILE__ << ":" << __LINE__ << ":" << "An exception has been thrown: " << ex.what()); + throw; + } + catch(...) + { + ROS_ERROR_STREAM(__FILE__ << ":" << __LINE__ << ":" << "Unknown exception has occured!"); + throw; + } +} + +void BaseRealSenseNode::stopRequiredSensors() +{ try{ for(auto&& sensor : _available_ros_sensors) { @@ -340,21 +395,61 @@ void BaseRealSenseNode::updateSensors() bool is_profile_changed(sensor->getUpdatedProfiles(wanted_profiles)); bool is_video_sensor = (sensor->is() || sensor->is()); - // do all updates if profile has been changed, or if the align depth filter status has been changed + // do all updates if profile has been changed, or if the align depth filter or gpu acceleration status has been changed // and we are on a video sensor. TODO: explore better options to monitor and update changes // without resetting the whole sensors and topics. - if (is_profile_changed || (_is_align_depth_changed && is_video_sensor)) + if (is_profile_changed || (is_video_sensor && (_is_align_depth_changed + #if defined (ACCELERATE_GPU_WITH_GLSL) + || _is_accelerate_gpu_with_glsl_changed + #endif + ))) { std::vector active_profiles = sensor->get_active_streams(); - if(is_profile_changed) + if (is_profile_changed + #if defined (ACCELERATE_GPU_WITH_GLSL) + || _is_accelerate_gpu_with_glsl_changed + #endif + ) { - // Start/stop sensors only if profile was changed + // Start/stop sensors only if profile or gpu acceleration status was changed // No need to start/stop sensors if align_depth was changed ROS_INFO_STREAM("Stopping Sensor: " << module_name); sensor->stop(); } stopPublishers(active_profiles); + } + } + } + catch(const std::exception& ex) + { + ROS_ERROR_STREAM(__FILE__ << ":" << __LINE__ << ":" << "An exception has been thrown: " << ex.what()); + throw; + } + catch(...) + { + ROS_ERROR_STREAM(__FILE__ << ":" << __LINE__ << ":" << "Unknown exception has occured!"); + throw; + } +} +void BaseRealSenseNode::startUpdatedSensors() +{ + try{ + for(auto&& sensor : _available_ros_sensors) + { + std::string module_name(rs2_to_ros(sensor->get_info(RS2_CAMERA_INFO_NAME))); + // if active_profiles != wanted_profiles: stop sensor. 
+ std::vector wanted_profiles; + + bool is_profile_changed(sensor->getUpdatedProfiles(wanted_profiles)); + bool is_video_sensor = (sensor->is() || sensor->is()); + + if (is_profile_changed || (is_video_sensor && (_is_align_depth_changed + #if defined (ACCELERATE_GPU_WITH_GLSL) + || _is_accelerate_gpu_with_glsl_changed + #endif + ))) + { if (!wanted_profiles.empty()) { startPublishers(wanted_profiles, *sensor); @@ -368,9 +463,13 @@ void BaseRealSenseNode::updateSensors() } } - if(is_profile_changed) + if (is_profile_changed + #if defined (ACCELERATE_GPU_WITH_GLSL) + || _is_accelerate_gpu_with_glsl_changed + #endif + ) { - // Start/stop sensors only if profile was changed + // Start/stop sensors only if profile or gpu acceleration was changed // No need to start/stop sensors if align_depth was changed ROS_INFO_STREAM("Starting Sensor: " << module_name); sensor->start(wanted_profiles); @@ -406,11 +505,72 @@ void BaseRealSenseNode::publishServices() { // adding "~/" to the service name will add node namespace and node name to the service // see "Private Namespace Substitution Character" section on https://design.ros2.org/articles/topic_and_service_names.html + _reset_srv = _node.create_service( + "~/hw_reset", + [&](const std_srvs::srv::Empty::Request::SharedPtr req, + std_srvs::srv::Empty::Response::SharedPtr res) + {handleHWReset(req, res);}); + _device_info_srv = _node.create_service( "~/device_info", [&](const realsense2_camera_msgs::srv::DeviceInfo::Request::SharedPtr req, realsense2_camera_msgs::srv::DeviceInfo::Response::SharedPtr res) {getDeviceInfo(req, res);}); + + _calib_config_read_srv = _node.create_service( + "~/calib_config_read", + [&](const realsense2_camera_msgs::srv::CalibConfigRead::Request::SharedPtr req, + realsense2_camera_msgs::srv::CalibConfigRead::Response::SharedPtr res) + {CalibConfigReadService(req, res);}); + + _calib_config_write_srv = _node.create_service( + "~/calib_config_write", + [&](const realsense2_camera_msgs::srv::CalibConfigWrite::Request::SharedPtr req, + realsense2_camera_msgs::srv::CalibConfigWrite::Response::SharedPtr res) + {CalibConfigWriteService(req, res);}); + +} + +void BaseRealSenseNode::publishActions() +{ + + using namespace std::placeholders; + _triggered_calibration_action_server = rclcpp_action::create_server( + _node.get_node_base_interface(), + _node.get_node_clock_interface(), + _node.get_node_logging_interface(), + _node.get_node_waitables_interface(), + "~/triggered_calibration", + std::bind(&BaseRealSenseNode::TriggeredCalibrationHandleGoal, this, _1, _2), + std::bind(&BaseRealSenseNode::TriggeredCalibrationHandleCancel, this, _1), + std::bind(&BaseRealSenseNode::TriggeredCalibrationHandleAccepted, this, _1)); + +} + +void BaseRealSenseNode::handleHWReset(const std_srvs::srv::Empty::Request::SharedPtr req, + const std_srvs::srv::Empty::Response::SharedPtr res) +{ + (void)req; + (void)res; + ROS_INFO_STREAM("Reset requested through service call"); + if (_dev) + { + try + { + for(auto&& sensor : _available_ros_sensors) + { + std::string module_name(rs2_to_ros(sensor->get_info(RS2_CAMERA_INFO_NAME))); + ROS_INFO_STREAM("Stopping Sensor: " << module_name); + sensor->stop(); + } + ROS_INFO("Resetting device..."); + _dev.hardware_reset(); + } + catch(const std::exception& ex) + { + ROS_WARN_STREAM("An exception has been thrown: " << __FILE__ << ":" << __LINE__ << ":" << ex.what()); + } + } } void BaseRealSenseNode::getDeviceInfo(const realsense2_camera_msgs::srv::DeviceInfo::Request::SharedPtr, @@ -432,3 +592,32 @@ void 
BaseRealSenseNode::getDeviceInfo(const realsense2_camera_msgs::srv::DeviceI res->sensors = sensors_names.str().substr(0, sensors_names.str().size()-1); res->physical_port = _dev.supports(RS2_CAMERA_INFO_PHYSICAL_PORT) ? _dev.get_info(RS2_CAMERA_INFO_PHYSICAL_PORT) : ""; } + +void BaseRealSenseNode::CalibConfigReadService(const realsense2_camera_msgs::srv::CalibConfigRead::Request::SharedPtr req, + realsense2_camera_msgs::srv::CalibConfigRead::Response::SharedPtr res){ + try + { + (void)req; // silence unused parameter warning + res->calib_config = _dev.as().get_calibration_config(); + res->success = true; + } + catch (const std::exception &e) + { + res->success = false; + res->error_message = std::string("Exception occurred: ") + e.what(); + } +} + +void BaseRealSenseNode::CalibConfigWriteService(const realsense2_camera_msgs::srv::CalibConfigWrite::Request::SharedPtr req, + realsense2_camera_msgs::srv::CalibConfigWrite::Response::SharedPtr res){ + try + { + _dev.as().set_calibration_config(req->calib_config); + res->success = true; + } + catch (const std::exception &e) + { + res->success = false; + res->error_message = std::string("Exception occurred: ") + e.what(); + } +} diff --git a/realsense2_camera/test/README.md b/realsense2_camera/test/README.md index ea3a043a92..581fe6f248 100644 --- a/realsense2_camera/test/README.md +++ b/realsense2_camera/test/README.md @@ -20,7 +20,7 @@ It is recommended to use the test folder itself for storing all the cpp tests. H ## Test using pytest The default folder for the test py files is realsense2_camera/test. Two test template files test_launch_template.py and test_integration_template.py are available in the same folder for reference. ### Add a new test -To add a new test, the user can create a copy of the test_launch_template.py or test_integration_template.py and start from there. Please name the file in the format test_`testname`.py so that the CMake detects the file and add as a new test. Please be aware that, multiple tests can be added into one file, the test_integration_template.py itself has more than one test.i The marker `@pytest.mark.launch` is used to specify the test entry point. +To add a new test, the user can create a copy of the test_launch_template.py or test_integration_template.py and start from there. Please name the file in the format test_`testname`.py so that the CMake detects the file and add as a new test. Please be aware that, multiple tests can be added into one file, the test_integration_template.py itself has more than one test. The marker `@pytest.mark.launch` is used to specify the test entry point. The test_launch_template.py uses the rs_launch.py to start the camera node, so this template can be used for testing the rs_launch.py together with the rs node. @@ -28,7 +28,7 @@ The test_integration_template.py gives a better control for testing, it uses few The test_integration_template.py has two types of tests, one has a function "test_using_function". If the user wants to have a better control over the launch context for any specific test scenarios, this can be used. Both the function based test and class based tests use a default launch configuration from the utils. It's recommended to modify the camera name to a unique one in the parameters itself so that there are not clashes between tests. -It is expected that the class based test is used as the test format for most of the usecases. The class based test inherits from pytest_rs_utils.RsTestBaseClass and it has three steps, namely: init, run_test and process_data. 
Unless for the basic tests, the user will have to override the process_data function and check if the data received from the topics are as expected. Also, if the user doesn't want the base class to modify the data, use 'store_raw_data':True in the theme definition. Please see the test_integration_template.py for reference. +It is expected that the class based test is used as the test format for most of the usecases. The class based test inherits from `pytest_rs_utils.RsTestBaseClass` and it has three steps, namely: init, run_test and process_data. Unless for the basic tests, the user will have to override the process_data function and check if the data received from the topics are as expected. Also, if the user doesn't want the base class to modify the data, use 'store_raw_data':True in the theme definition. Please see the test_integration_template.py for reference. An assert command can be used to indicate if the test failed or passed. Please see the template for more info. @@ -47,11 +47,11 @@ new_folder_for_pytest #<-- new folder #but please be aware that the utils functi ``` ### Grouping of tests -The pytests can be grouped using markers. These markers can be used to run a group of tests. However, "colcon test" command doesn't pass a custom marker using (--pytest-args -m `marker_name`) to the pytest internally. This is because, the ament_cmake that works as a bridge between colcon and pytest doesn't pass the pytest arguments to pytest. So till this is fixed, pytest command has to be used directly for running a group of tests. Please see the next session for the commands to run a group py tests. +The pytests can be grouped using markers. These markers can be used to run a group of tests. However, "colcon test" command doesn't pass a custom marker using (--pytest-args -m `marker_name`) to the pytest internally. This is because, the ament_cmake that works as a bridge between colcon and pytest doesn't pass the pytest arguments to pytest. So until this is fixed, pytest command has to be used directly for running a group of tests. Please see the next session for the commands to run a group of py tests. The grouping is specified by adding a marker just before the test declaration. In the test_integration_template.py `rosbag` is specified as a marker specify tests that use rosbag file. This is achieved by adding "@pytest.mark.rosbag" to the begining of the test. So when the pytest parses for test, it detects the marker for the test. If this marker is selected or none of the markers are specified, the test will be added to the list, else will be listed as a deselected test. -It is recommended to use markers such as ds457, rosbag, ds415 etc to differentiate the tests so that it's easier to run a group of tests in a machine that has the required hardware. +It is recommended to use markers such as d457, rosbag, or d435i etc to differentiate the tests so that it's easier to run a group of tests in a machine that has the required hardware. ## Building and running tests @@ -126,6 +126,15 @@ The xml files mentioned by the command can be directly opened also. ### Running pytests directly +Note : +1. All the commands for test execution has to be executed from realsense-ros folder. For example: If the ROS2 workspace was created based on Step 3 [Option2] of [this](https://github.com/IntelRealSense/realsense-ros/blob/ros2-master/README.md#installation). +Then, the path to execute the tests would be ~/ros2_ws/src/realsense-ros. + + cd ~/ros2_ws/src/realsense-ros + +2. 
Please setup below required environment variables. If not, Please prefix them for every test execution. + + export PYTHONPATH=$PYTHONPATH:$PWD/realsense2_camera/test/utils:$PWD/realsense2_camera//launch:$PWD/realsense2_camera//scripts User can run all the tests in a pytest file directly using the below command: @@ -160,7 +169,7 @@ If a user wants to add a test to this conditional skip, user can add by adding a ## Points to be noted while writing pytests The tests that are in one file are normally run in parallel, there could also be changes in the pytest plugin. So if there are multiple tests in one file, the system capacity can influence the test execution. It's recomended to have 3-4 tests in file, more than that can affect the test results due to delays. ### Passing/changing parameters -The parameters passed while creating the node can be initialized individually for each test, please see the test_parameterized_template example for reference. The default values are taken from rs_launch.py and the passed parameters are used for overriding the default values. The parameters that can be dynamically modified can be changed using the param interface provided. However, the function create_param_ifs has to be called to create this interface. Please see the test_d455_basic_tests.py for reference. There are specific functions to change the string, integer and bool parameters, the utils can be extended if any more types are needed. +The parameters passed while creating the node can be initialized individually for each test, please see the test_parameterized_template example for reference. The default values are taken from rs_launch.py and the passed parameters are used for overriding the default values. The parameters that can be dynamically modified can be changed using the param interface provided. However, the function `create_service_client_ifs()` has to be called to create this interface. Please see the test_d455_basic_tests.py for reference. There are specific functions to change the string, integer and bool parameters, the utils can be extended if any more types are needed. ### Difference in setting the bool parameters There is a difference between setting the default values of bool parameters and setting them dynamically. The bool test params are assinged withn quotes as below. @@ -174,4 +183,7 @@ The bool test params are assinged withn quotes as below. However the function that implements the setting of bool parameter dynamically takes the python bool datatype. For example: self.set_bool_param('enable_accel', False) - +### Adding 'service call' client interface +1. Create a client for respective service call in method `create_service_client_ifs()`, refer client creation for service call `device_info` with service type `DeviceInfo`. +2. Create a `get_` or `set_` method for each service call, refer `get_deviceinfo` in pytest_rs_utils.py +3. Use the `get_`, `set_` service calls as used in test_camera_service_call.py diff --git a/realsense2_camera/test/live_camera/rosci.py b/realsense2_camera/test/live_camera/rosci.py new file mode 100755 index 0000000000..d6acd50c35 --- /dev/null +++ b/realsense2_camera/test/live_camera/rosci.py @@ -0,0 +1,194 @@ +# Copyright 2024 Intel Corporation. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys, os, subprocess, re, getopt, time + +start_time = time.time() +running_on_ci = False +if 'WORKSPACE' in os.environ: + #Path for ROS-CI on Jenkins + ws_rosci = os.environ['WORKSPACE'] + sys.path.append( os.path.join( ws_rosci, 'lrs/unit-tests/py' )) + running_on_ci = True +else: + #For running this script locally + #Extract the root where both realsense-ros and librealsense are cloned + ws_local = '/'.join(os.path.abspath( __file__ ).split( os.path.sep )[0:-5]) + #expected to have 'librealsense' repo in parallel to 'realsense-ros' + assert os.path.exists( os.path.join(ws_local, 'librealsense')), f" 'librealsense' doesn't exist at {ws_local} " + sys.path.append( os.path.join( ws_local, 'librealsense/unit-tests/py' )) + +#logs are stored @ ./realsense2_camera/test/logs +logdir = os.path.join( '/'.join(os.path.abspath( __file__ ).split( os.path.sep )[0:-2]), 'logs') +dir_live_tests = os.path.dirname(__file__) + +from rspy import log, file +regex = None +hub_reset = False +handle = None +test_ran = False +device_set = list() + +def usage(): + ourname = os.path.basename( sys.argv[0] ) + print( 'Syntax: ' + ourname + ' [options] ' ) + print( 'Options:' ) + print( ' -h, --help Usage help' ) + print( ' -r, --regex Run all tests whose name matches the following regular expression' ) + print( ' e.g.: --regex test_camera_imu; -r d415_basic') + print( ' --device <> Run only on the specified device(s); list of devices separated by ',', No white spaces' ) + print( ' e.g.: --device=d455,d435i,d585 (or) --device d455,d435i,d585 ') + print( ' Note: if --device option not used, tests run on all connected devices ') + + sys.exit( 2 ) + +def command(dev_name, test=None): + cmd = ['pytest-3'] + cmd += ['-s'] + cmd += ['-m', ''.join(dev_name)] + if test: + cmd += ['-k', f'{test}'] + cmd += [''.join(dir_live_tests)] + cmd += ['--debug'] + cmd += [f'--junit-xml={logdir}/{dev_name.upper()}_pytest.xml'] + return cmd + +def run_test(cmd, test=None, dev_name=None, stdout=None, append =False): + handle = None + try: + if test: + stdout = stdout + os.sep + str(dev_name.upper()) + '_' + test + '.log' + else: + stdout = stdout + os.sep + str(dev_name.upper()) + '_' + 'full.log' + if stdout is None: + sys.stdout.flush() + elif stdout and stdout != subprocess.PIPE: + if append: + handle = open( stdout, "a" ) + handle.write( + "\n----------TEST-SEPARATOR----------\n\n" ) + handle.flush() + else: + handle = open( stdout, "w" ) + + result = subprocess.run( cmd, + stdout=handle, + stderr=subprocess.STDOUT, + universal_newlines=True, + timeout=200, + check=True ) + if not result.returncode: + log.i("---Test Passed---") + except Exception as e: + log.e("---Test Failed---") + log.w( "Error Exception:\n ",e ) + + finally: + if handle: + handle.close() + junit_xml_parsing(f'{dev_name.upper()}_pytest.xml') + +def junit_xml_parsing(xml_file): + ''' + - remove redundant hierarchy from testcase 'classname', and 'name' attributes \ + - running pytest-3 w/ --junit-xml={logdir}/{dev_name}_pytest.xml results in classname w/ \ + too long path wich is redundant. 
\ + - this helps in better reporting structure of test results in jenkins + ''' + import xml.etree.ElementTree as ET + global logdir + + if not os.path.isfile( os.path.join(logdir, f'{xml_file}' )): + log.e(f'{xml_file} not found, test resutls can\'t be generated') + else: + tree = ET.parse(os.path.join(logdir,xml_file)) + root = tree.getroot() + for testsuite in root.findall('testsuite'): + for testcase in testsuite.findall('testcase'): + testcase.set('classname', testcase.attrib['classname'].split('.')[-2]) + testcase.set('name', re.sub('launch_.*parameters','',testcase.attrib['name'])) + new_xml = xml_file.split('.')[0] + tree.write(f'{logdir}/{new_xml}_refined.xml') + +def find_devices_run_tests(): + from rspy import devices + global logdir, device_set, _device_by_sn + max_retry = 3 + + try: + os.makedirs( logdir, exist_ok=True ) + + #Update dict '_device_by_sn' from devices module of rspy + while(max_retry and not devices._device_by_sn): + subprocess.run('ykushcmd ykush3 --reset', shell=True) + time.sleep(2.0) + devices.query( hub_reset = hub_reset ) + max_retry -= 1 + + if not devices._device_by_sn: + assert False, 'No Camera device detected!' + else: + connected_devices = [device.name for device in devices._device_by_sn.values()] + log.i('Connected devices:', connected_devices) + + testname = regex if regex else None + + if device_set: + #Loop in for user specified devices and run tests only on them + devices_not_found = [] + for device in device_set: + if device.upper() in connected_devices: + log.i('Running tests on device:', device) + cmd = command(device.lower(), testname) + run_test(cmd, testname, device, stdout=logdir, append =False) + else: + log.e('Skipping test run on device:', device, ', -- NOT found' ) + devices_not_found.append(device) + assert len(devices_not_found) == 0, f'Devices not found:{devices_not_found}' + else: + #Loop in for all connected devices and run all tests + for device in connected_devices: + log.i('Running tests on device:', device) + cmd = command(device.lower(), testname) + run_test(cmd, testname, device, stdout=logdir, append =False) + finally: + if devices.hub and devices.hub.is_connected(): + devices.hub.disable_ports() + devices.wait_until_all_ports_disabled() + devices.hub.disconnect() + if running_on_ci: + log.i("Log path- \"Build Artifacts\":/ros2/realsense_camera/test/logs ") + else: + log.i("log path:", logdir) + run_time = time.time() - start_time + log.d( "server took", run_time, "seconds" ) + +if __name__ == '__main__': + try: + opts, args = getopt.getopt( sys.argv[1:], 'hr:', longopts=['help', 'regex=', 'device=' ] ) + except getopt.GetoptError as err: + log.e( err ) + usage() + + for opt, arg in opts: + if opt in ('-h', '--help'): + usage() + elif opt in ('-r', '--regex'): + regex = arg + elif opt == '--device': + device_set = arg.split(',') + + find_devices_run_tests() + +sys.exit( 0 ) diff --git a/realsense2_camera/test/live_camera/test_camera_aligned_tests.py b/realsense2_camera/test/live_camera/test_camera_aligned_tests.py index 0c17e76e47..27fc224aa4 100644 --- a/realsense2_camera/test/live_camera/test_camera_aligned_tests.py +++ b/realsense2_camera/test/live_camera/test_camera_aligned_tests.py @@ -46,8 +46,8 @@ 'device_type': 'D455', 'enable_color':'true', 'enable_depth':'true', - 'depth_module.profile':'848x480x30', - 'rgb_camera.profile':'640x480x30', + 'depth_module.depth_profile':'848x480x30', + 'rgb_camera.color_profile':'640x480x30', 'align_depth.enable':'true' } test_params_align_depth_color_d415 = { @@ -55,8 +55,8 @@ 
'device_type': 'D415', 'enable_color':'true', 'enable_depth':'true', - 'depth_module.profile':'848x480x30', - 'rgb_camera.profile':'640x480x30', + 'depth_module.depth_profile':'848x480x30', + 'rgb_camera.color_profile':'640x480x30', 'align_depth.enable':'true' } ''' @@ -79,6 +79,7 @@ def test_camera_align_depth_color(self,launch_descr_with_parameters): params = launch_descr_with_parameters[1] if pytest_live_camera_utils.check_if_camera_connected(params['device_type']) == False: print("Device not found? : " + params['device_type']) + assert False return themes = [ {'topic':get_node_heirarchy(params)+'/color/image_raw', @@ -107,11 +108,11 @@ def test_camera_align_depth_color(self,launch_descr_with_parameters): print("Starting camera test...") self.init_test("RsTest"+params['camera_name']) self.wait_for_node(params['camera_name']) - self.create_param_ifs(get_node_heirarchy(params)) + self.create_service_client_ifs(get_node_heirarchy(params)) ret = self.run_test(themes) assert ret[0], ret[1] assert self.process_data(themes) - self.set_string_param('rgb_camera.profile', '1280x720x30') + self.set_string_param('rgb_camera.color_profile', '1280x720x30') self.set_bool_param('enable_color', True) themes[0]['width'] = 1280 themes[0]['height'] = 720 @@ -131,8 +132,8 @@ def test_camera_align_depth_color(self,launch_descr_with_parameters): 'device_type': 'D455', 'enable_color':'true', 'enable_depth':'true', - 'depth_module.profile':'848x480x30', - 'rgb_camera.profile':'640x480x30', + 'depth_module.depth_profile':'848x480x30', + 'rgb_camera.color_profile':'640x480x30', 'align_depth.enable':'true' } test_params_all_profiles_d415 = { @@ -140,17 +141,17 @@ def test_camera_align_depth_color(self,launch_descr_with_parameters): 'device_type': 'D415', 'enable_color':'true', 'enable_depth':'true', - 'depth_module.profile':'848x480x30', - 'rgb_camera.profile':'640x480x30', + 'depth_module.depth_profile':'848x480x30', + 'rgb_camera.color_profile':'640x480x30', 'align_depth.enable':'true' } -test_params_all_profiles_d435 = { - 'camera_name': 'D435', - 'device_type': 'D435', +test_params_all_profiles_d435i = { + 'camera_name': 'D435I', + 'device_type': 'D435I', 'enable_color':'true', 'enable_depth':'true', - 'depth_module.profile':'848x480x30', - 'rgb_camera.profile':'640x480x30', + 'depth_module.depth_profile':'848x480x30', + 'rgb_camera.color_profile':'640x480x30', 'align_depth.enable':'true' } @@ -166,7 +167,7 @@ def test_camera_align_depth_color(self,launch_descr_with_parameters): @pytest.mark.parametrize("launch_descr_with_parameters", [ pytest.param(test_params_all_profiles_d455, marks=pytest.mark.d455), pytest.param(test_params_all_profiles_d415, marks=pytest.mark.d415), - pytest.param(test_params_all_profiles_d435, marks=pytest.mark.d435),] + pytest.param(test_params_all_profiles_d435i, marks=pytest.mark.d435i),] ,indirect=True) @pytest.mark.launch(fixture=launch_descr_with_parameters) class TestCamera_AllAlignDepthColor(pytest_rs_utils.RsTestBaseClass): @@ -178,6 +179,7 @@ def test_camera_all_align_depth_color(self,launch_descr_with_parameters): params = launch_descr_with_parameters[1] if pytest_live_camera_utils.check_if_camera_connected(params['device_type']) == False: print("Device not found? : " + params['device_type']) + assert False return themes = [ {'topic':get_node_heirarchy(params)+'/color/image_raw', @@ -209,7 +211,7 @@ def test_camera_all_align_depth_color(self,launch_descr_with_parameters): if cap == None: debug_print("Device not found? 
: " + params['device_type']) return - self.create_param_ifs(get_node_heirarchy(params)) + self.create_service_client_ifs(get_node_heirarchy(params)) color_profiles = set([i[1] for i in cap["color_profile"] if i[2] == "RGB8"]) depth_profiles = set([i[1] for i in cap["depth_profile"] if i[0] == "Depth"]) for color_profile in color_profiles: @@ -234,8 +236,8 @@ def test_camera_all_align_depth_color(self,launch_descr_with_parameters): timeout=100.0/fps #for the changes to take effect self.spin_for_time(wait_time=timeout/20) - self.set_string_param('rgb_camera.profile', color_profile) - self.set_string_param('depth_module.profile', depth_profile) + self.set_string_param('rgb_camera.color_profile', color_profile) + self.set_string_param('depth_module.depth_profile', depth_profile) self.set_bool_param('enable_color', True) self.set_bool_param('enable_color', True) self.set_bool_param('align_depth.enable', True) diff --git a/realsense2_camera/test/live_camera/test_camera_all_profile_tests.py b/realsense2_camera/test/live_camera/test_camera_all_profile_tests.py index b337e6f8d5..27cc4a6d32 100644 --- a/realsense2_camera/test/live_camera/test_camera_all_profile_tests.py +++ b/realsense2_camera/test/live_camera/test_camera_all_profile_tests.py @@ -127,9 +127,9 @@ def check_if_skip_test(profile, format): 'camera_name': 'D415', 'device_type': 'D415', } -test_params_all_profiles_d435 = { - 'camera_name': 'D435', - 'device_type': 'D435', +test_params_all_profiles_d435i = { + 'camera_name': 'D435I', + 'device_type': 'D435I', } @@ -144,7 +144,7 @@ def check_if_skip_test(profile, format): @pytest.mark.parametrize("launch_descr_with_parameters", [ pytest.param(test_params_all_profiles_d455, marks=pytest.mark.d455), pytest.param(test_params_all_profiles_d415, marks=pytest.mark.d415), - pytest.param(test_params_all_profiles_d435, marks=pytest.mark.d435),] + pytest.param(test_params_all_profiles_d435i, marks=pytest.mark.d435i),] ,indirect=True) @pytest.mark.launch(fixture=launch_descr_with_parameters) class TestLiveCamera_Change_Resolution(pytest_rs_utils.RsTestBaseClass): @@ -154,6 +154,10 @@ def test_LiveCamera_Change_Resolution(self,launch_descr_with_parameters): num_passed = 0 num_failed = 0 params = launch_descr_with_parameters[1] + if pytest_live_camera_utils.check_if_camera_connected(params['device_type']) == False: + print("Device not found? : " + params['device_type']) + assert False + return themes = [{'topic':get_node_heirarchy(params)+'/color/image_raw', 'msg_type':msg_Image,'expected_data_chunks':1}] config = pytest_live_camera_utils.get_profile_config(get_node_heirarchy(params)) try: @@ -166,7 +170,7 @@ def test_LiveCamera_Change_Resolution(self,launch_descr_with_parameters): if cap == None: debug_print("Device not found? 
: " + params['device_type']) return - self.create_param_ifs(get_node_heirarchy(params)) + self.create_service_client_ifs(get_node_heirarchy(params)) config["Color"]["default_profile1"],config["Color"]["default_profile2"] = pytest_live_camera_utils.get_default_profiles(cap["color_profile"], "Color") config["Depth"]["default_profile1"],config["Depth"]["default_profile2"] = pytest_live_camera_utils.get_default_profiles(cap["depth_profile"], "Depth") config["Infrared"]["default_profile1"],config["Infrared"]["default_profile2"] = pytest_live_camera_utils.get_default_profiles(cap["depth_profile"], "Infrared") diff --git a/realsense2_camera/test/live_camera/test_camera_fps_tests.py b/realsense2_camera/test/live_camera/test_camera_fps_tests.py index c4b91354d4..e3e66665c8 100644 --- a/realsense2_camera/test/live_camera/test_camera_fps_tests.py +++ b/realsense2_camera/test/live_camera/test_camera_fps_tests.py @@ -63,6 +63,7 @@ modified to make it work, see py_rs_utils for more details. To check the fps, a value 'expected_fps_in_hz' has to be added to the corresponding theme ''' +@pytest.mark.skipif (os.getenv('RS_ROS_REGRESSION', "not found") == "not found",reason="Regression is not enabled, define RS_ROS_REGRESSION") @pytest.mark.parametrize("launch_descr_with_parameters", [ pytest.param(test_params_test_fps_d455, marks=pytest.mark.d455), pytest.param(test_params_test_fps_d415, marks=pytest.mark.d415), @@ -74,6 +75,7 @@ def test_camera_test_fps(self,launch_descr_with_parameters): params = launch_descr_with_parameters[1] if pytest_live_camera_utils.check_if_camera_connected(params['device_type']) == False: print("Device not found? : " + params['device_type']) + assert False return try: ''' @@ -82,9 +84,8 @@ def test_camera_test_fps(self,launch_descr_with_parameters): print("Starting camera test...") self.init_test("RsTest"+params['camera_name']) self.wait_for_node(params['camera_name']) - self.create_param_ifs(get_node_heirarchy(params)) - #assert self.set_bool_param('enable_color', False) - + self.create_service_client_ifs(get_node_heirarchy(params)) + themes = [ {'topic':get_node_heirarchy(params)+'/depth/image_rect_raw', 'msg_type':msg_Image, @@ -92,13 +93,17 @@ def test_camera_test_fps(self,launch_descr_with_parameters): } ] profiles = test_profiles[params['camera_name']]['depth_test_profiles'] + self.spin_for_time(wait_time=0.5) assert self.set_bool_param('enable_color', False) + self.spin_for_time(wait_time=0.5) + for profile in profiles: print("Testing profile: ", profile) themes[0]['expected_fps_in_hz'] = float(profile.split('x')[2]) - assert self.set_string_param('depth_module.profile', profile) + assert self.set_string_param('depth_module.depth_profile', profile) + self.spin_for_time(wait_time=0.5) assert self.set_bool_param('enable_depth', True) - self.spin_for_time(0.5) + self.spin_for_time(wait_time=0.5) ret = self.run_test(themes, timeout=25.0) assert ret[0], ret[1] assert self.process_data(themes) @@ -110,12 +115,15 @@ def test_camera_test_fps(self,launch_descr_with_parameters): } ] assert self.set_bool_param('enable_depth', False) + self.spin_for_time(wait_time=0.5) profiles = test_profiles[params['camera_name']]['color_test_profiles'] for profile in profiles: print("Testing profile: ", profile) themes[0]['expected_fps_in_hz'] = float(profile.split('x')[2]) - assert self.set_string_param('rgb_camera.profile', profile) + assert self.set_string_param('rgb_camera.color_profile', profile) + self.spin_for_time(wait_time=0.5) assert self.set_bool_param('enable_color', True) + 
self.spin_for_time(wait_time=0.5) ret = self.run_test(themes, timeout=25.0) assert ret[0], ret[1] assert self.process_data(themes) diff --git a/realsense2_camera/test/live_camera/test_camera_imu_tests.py b/realsense2_camera/test/live_camera/test_camera_imu_tests.py index 384d249f47..0dc16f0cf7 100644 --- a/realsense2_camera/test/live_camera/test_camera_imu_tests.py +++ b/realsense2_camera/test/live_camera/test_camera_imu_tests.py @@ -57,6 +57,7 @@ def test_LiveCamera_check_motion_sensor(self,launch_descr_with_parameters): params = launch_descr_with_parameters[1] if pytest_live_camera_utils.check_if_camera_connected(params['device_type']) == False: print("Device not found? : " + params['device_type']) + assert False return themes = [{'topic':get_node_heirarchy(params)+'/imu', 'msg_type':msg_Imu,'expected_data_chunks':1}, {'topic':get_node_heirarchy(params)+'/gyro/sample', 'msg_type':msg_Imu,'expected_data_chunks':1}, @@ -69,7 +70,7 @@ def test_LiveCamera_check_motion_sensor(self,launch_descr_with_parameters): try: #initialize self.init_test("RsTest"+params['camera_name']) - self.create_param_ifs(get_node_heirarchy(params)) + self.create_service_client_ifs(get_node_heirarchy(params)) #run with default params and check the data diff --git a/realsense2_camera/test/live_camera/test_camera_point_cloud_tests.py b/realsense2_camera/test/live_camera/test_camera_point_cloud_tests.py index 14def86ca9..85fc84121c 100644 --- a/realsense2_camera/test/live_camera/test_camera_point_cloud_tests.py +++ b/realsense2_camera/test/live_camera/test_camera_point_cloud_tests.py @@ -55,9 +55,9 @@ 'device_type': 'D455', 'pointcloud.enable': 'true' } -test_params_points_cloud_d435 = { - 'camera_name': 'D435', - 'device_type': 'D435', +test_params_points_cloud_d435i = { + 'camera_name': 'D435I', + 'device_type': 'D435I', 'pointcloud.enable': 'true' } @@ -76,7 +76,7 @@ ''' @pytest.mark.parametrize("launch_descr_with_parameters", [ pytest.param(test_params_points_cloud_d455, marks=pytest.mark.d455), - pytest.param(test_params_points_cloud_d435, marks=pytest.mark.d435), + pytest.param(test_params_points_cloud_d435i, marks=pytest.mark.d435i), pytest.param(test_params_points_cloud_d415, marks=pytest.mark.d415), ],indirect=True) @pytest.mark.launch(fixture=launch_descr_with_parameters) @@ -85,6 +85,7 @@ def test_camera_test_point_cloud(self,launch_descr_with_parameters): self.params = launch_descr_with_parameters[1] if pytest_live_camera_utils.check_if_camera_connected(self.params['device_type']) == False: print("Device not found? : " + self.params['device_type']) + assert False return themes = [ { @@ -99,7 +100,7 @@ def test_camera_test_point_cloud(self,launch_descr_with_parameters): ''' self.init_test("RsTest"+self.params['camera_name']) self.wait_for_node(self.params['camera_name']) - self.create_param_ifs(get_node_heirarchy(self.params)) + self.create_service_client_ifs(get_node_heirarchy(self.params)) ret = self.run_test(themes, timeout=10) assert ret[0], ret[1] ret = self.process_data(themes, False) diff --git a/realsense2_camera/test/live_camera/test_camera_service_call.py b/realsense2_camera/test/live_camera/test_camera_service_call.py new file mode 100644 index 0000000000..9b195064f1 --- /dev/null +++ b/realsense2_camera/test/live_camera/test_camera_service_call.py @@ -0,0 +1,73 @@ +# Copyright 2024 Intel Corporation. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os, sys +import pytest + +sys.path.append(os.path.abspath(os.path.dirname(__file__)+"/../utils")) +import pytest_rs_utils +import pytest_live_camera_utils +from pytest_rs_utils import launch_descr_with_parameters +from pytest_rs_utils import get_node_heirarchy + +test_params_test_srv_d455 = { + 'camera_name': 'D455', + 'device_type': 'D455', + } +test_params_test_srv_d435i = { + 'camera_name': 'D435i', + 'device_type': 'D435i', + } +test_params_test_srv_d415 = { + 'camera_name': 'D415', + 'device_type': 'D415', + } + +''' +The test checks service call device_info with type DeviceInfo +device info includes - device name, FW version, serial number, etc +''' +@pytest.mark.parametrize("launch_descr_with_parameters", [ + pytest.param(test_params_test_srv_d455, marks=pytest.mark.d455), + pytest.param(test_params_test_srv_d435i, marks=pytest.mark.d435i), + pytest.param(test_params_test_srv_d415, marks=pytest.mark.d415), + ],indirect=True) +@pytest.mark.launch(fixture=launch_descr_with_parameters) +class TestCamera_ServiceCall_DeviceInfo(pytest_rs_utils.RsTestBaseClass): + def test_camera_service_call_device_info(self,launch_descr_with_parameters): + params = launch_descr_with_parameters[1] + + if pytest_live_camera_utils.check_if_camera_connected(params['device_type']) == False: + print("Device not found? : " + params['device_type']) + assert False + + try: + print("Starting camera test...") + self.init_test("RsTest"+params['camera_name']) + self.wait_for_node(params['camera_name']) + self.create_service_client_ifs(get_node_heirarchy(params)) + #No need to call run_test() as no frame integritiy check required + response = self.get_deviceinfo() + if response is not None: + print(f"device_info service response:\n{response}\n") + assert params['device_type'].casefold() in response.device_name.casefold().split('_') + else: + assert False, "No device_info response received" + except Exception as e: + print(e) + finally: + #this step is important because the test will fail next time + pytest_rs_utils.kill_realsense2_camera_node() + self.shutdown() + diff --git a/realsense2_camera/test/live_camera/test_camera_tf_tests.py b/realsense2_camera/test/live_camera/test_camera_tf_tests.py index a31f74e77c..64350ff7fe 100644 --- a/realsense2_camera/test/live_camera/test_camera_tf_tests.py +++ b/realsense2_camera/test/live_camera/test_camera_tf_tests.py @@ -58,9 +58,9 @@ 'enable_accel': 'true', 'enable_gyro': 'true', } -test_params_tf_static_change_d435 = { - 'camera_name': 'D435', - 'device_type': 'D435', +test_params_tf_static_change_d435i = { + 'camera_name': 'D435I', + 'device_type': 'D435I', 'enable_infra1': 'false', 'enable_infra2': 'true', 'enable_accel': 'true', @@ -76,8 +76,9 @@ 'enable_gyro': 'true', } @pytest.mark.parametrize("launch_descr_with_parameters", [ - pytest.param(test_params_tf_static_change_d455, marks=pytest.mark.d455), - pytest.param(test_params_tf_static_change_d435, marks=pytest.mark.d435), + #LRS-1181 [ROS2] To debug inconsistent TF (transform) test that fails on Jenkin 219 NUC on D455 + #pytest.param(test_params_tf_static_change_d455, marks=pytest.mark.d455), + 
pytest.param(test_params_tf_static_change_d435i, marks=pytest.mark.d435i), pytest.param(test_params_tf_static_change_d415, marks=pytest.mark.d415), ],indirect=True) @pytest.mark.launch(fixture=launch_descr_with_parameters) @@ -86,6 +87,7 @@ def test_camera_test_tf_static_change(self,launch_descr_with_parameters): self.params = launch_descr_with_parameters[1] if pytest_live_camera_utils.check_if_camera_connected(self.params['device_type']) == False: print("Device not found? : " + self.params['device_type']) + assert False return themes = [ {'topic':'/tf_static', @@ -100,7 +102,7 @@ def test_camera_test_tf_static_change(self,launch_descr_with_parameters): ''' self.init_test("RsTest"+self.params['camera_name']) self.wait_for_node(self.params['camera_name']) - self.create_param_ifs(get_node_heirarchy(self.params)) + self.create_service_client_ifs(get_node_heirarchy(self.params)) ret = self.run_test(themes, timeout=10) assert ret[0], ret[1] @@ -145,9 +147,9 @@ def process_data(self, themes, enable_infra1): 'tf_publish_rate': '1.1', } -test_params_tf_d435 = { - 'camera_name': 'D435', - 'device_type': 'D435', +test_params_tf_d435i = { + 'camera_name': 'D435I', + 'device_type': 'D435I', 'publish_tf': 'true', 'tf_publish_rate': '1.1', } @@ -159,8 +161,9 @@ def process_data(self, themes, enable_infra1): 'tf_publish_rate': '1.1', } @pytest.mark.parametrize("launch_descr_with_parameters", [ - pytest.param(test_params_tf_d455, marks=pytest.mark.d455), - pytest.param(test_params_tf_d435, marks=pytest.mark.d435), + #LRS-1181 [ROS2] To debug inconsistent TF (transform) test that fails on Jenkin 219 NUC on D455 + #pytest.param(test_params_tf_d455, marks=pytest.mark.d455), + pytest.param(test_params_tf_d435i, marks=pytest.mark.d435i), pytest.param(test_params_tf_d415, marks=pytest.mark.d415), ],indirect=True) @pytest.mark.launch(fixture=launch_descr_with_parameters) @@ -169,6 +172,7 @@ def test_camera_test_tf_dyn(self,launch_descr_with_parameters): self.params = launch_descr_with_parameters[1] if pytest_live_camera_utils.check_if_camera_connected(self.params['device_type']) == False: print("Device not found? : " + self.params['device_type']) + assert False return themes = [ {'topic':'/tf', @@ -190,7 +194,7 @@ def test_camera_test_tf_dyn(self,launch_descr_with_parameters): ''' self.init_test("RsTest"+self.params['camera_name']) self.wait_for_node(self.params['camera_name']) - self.create_param_ifs(get_node_heirarchy(self.params)) + self.create_service_client_ifs(get_node_heirarchy(self.params)) ret = self.run_test(themes, timeout=10) assert ret[0], ret[1] ret = self.process_data(themes, False) diff --git a/realsense2_camera/test/live_camera/test_d415_basic_tests.py b/realsense2_camera/test/live_camera/test_d415_basic_tests.py index 649f84e7bd..1b8ca54510 100644 --- a/realsense2_camera/test/live_camera/test_d415_basic_tests.py +++ b/realsense2_camera/test/live_camera/test_d415_basic_tests.py @@ -57,6 +57,7 @@ def test_D415_Change_Resolution(self,launch_descr_with_parameters): params = launch_descr_with_parameters[1] if pytest_live_camera_utils.check_if_camera_connected(params['device_type']) == False: print("Device not found? 
: " + params['device_type']) + assert False return failed_tests = [] num_passed = 0 @@ -76,8 +77,6 @@ def test_D415_Change_Resolution(self,launch_descr_with_parameters): config["Infrared2"]["default_profile2"] = "1280x720x6" cap = [ - #['Infrared1', '1920x1080x25', 'Y8'], - #['Infrared1', '1920x1080x15', 'Y16'], ['Infrared', '848x100x100', 'BGRA8'], ['Infrared', '848x480x60', 'RGBA8'], ['Infrared', '640x480x60', 'RGBA8'], @@ -88,16 +87,23 @@ def test_D415_Change_Resolution(self,launch_descr_with_parameters): ['Infrared', '480x270x60', 'BGRA8'], ['Infrared', '480x270x60', 'RGB8'], ['Infrared', '424x240x60', 'BGRA8'], - + ['Infrared1', '848x100x100', 'Y8'], + ['Infrared1', '848x480x6', 'Y8'], + ['Infrared1', '1920x1080x25', 'Y16'], + ['Infrared2', '848x100x100', 'Y8'], + ['Infrared2', '848x480x6', 'Y8'], + ['Infrared2', '1920x1080x25', 'Y16'], ] + try: ''' initialize, run and check the data ''' self.init_test("RsTest"+params['camera_name']) self.spin_for_time(wait_time=1.0) - self.create_param_ifs(get_node_heirarchy(params)) - + self.create_service_client_ifs(get_node_heirarchy(params)) + self.spin_for_time(wait_time=1.0) + for key in cap: profile_type = key[0] profile = key[1] @@ -107,21 +113,22 @@ def test_D415_Change_Resolution(self,launch_descr_with_parameters): themes[0]['width'] = int(profile.split('x')[0]) themes[0]['height'] = int(profile.split('x')[1]) #''' + self.disable_all_streams() if themes[0]['width'] == int(config[profile_type]["default_profile2"].split('x')[0]): self.set_string_param(config[profile_type]["profile"], config[profile_type]["default_profile1"]) else: self.set_string_param(config[profile_type]["profile"], config[profile_type]["default_profile2"]) self.set_bool_param(config[profile_type]["param"], True) - self.disable_all_params() - #self.set_string_param("depth_profile", "640x480x6") - #self.set_bool_param("enable_depth", True) - #''' - self.spin_for_time(wait_time=0.2) + self.spin_for_time(wait_time=1.0) + self.set_string_param(config[profile_type]["profile"], profile) + self.spin_for_time(wait_time=1.0) self.set_string_param(config[profile_type]["format"], format) + self.spin_for_time(wait_time=1.0) self.set_bool_param(config[profile_type]["param"], True) + self.spin_for_time(wait_time=1.0) try: - ret = self.run_test(themes, timeout=5.0) + ret = self.run_test(themes, timeout=10.0) assert ret[0], ret[1] assert self.process_data(themes), " ".join(key) + " failed" num_passed += 1 @@ -143,13 +150,18 @@ def test_D415_Change_Resolution(self,launch_descr_with_parameters): assert False, " Tests failed" - def disable_all_params(self): - ''' + def disable_all_streams(self): + self.set_bool_param('enable_color', False) + self.spin_for_time(wait_time=1.0) self.set_bool_param('enable_depth', False) + self.spin_for_time(wait_time=1.0) self.set_bool_param('enable_infra', False) + self.spin_for_time(wait_time=1.0) self.set_bool_param('enable_infra1', False) + self.spin_for_time(wait_time=1.0) self.set_bool_param('enable_infra2', False) - ''' + self.spin_for_time(wait_time=1.0) + pass diff --git a/realsense2_camera/test/live_camera/test_d455_basic_tests.py b/realsense2_camera/test/live_camera/test_d455_basic_tests.py index fb2f58cab6..b008e3d34e 100644 --- a/realsense2_camera/test/live_camera/test_d455_basic_tests.py +++ b/realsense2_camera/test/live_camera/test_d455_basic_tests.py @@ -58,6 +58,7 @@ def test_D455_Change_Resolution(self,launch_descr_with_parameters): params = launch_descr_with_parameters[1] if 
pytest_live_camera_utils.check_if_camera_connected(params['device_type']) == False: print("Device not found? : " + params['device_type']) + assert False return themes = [ @@ -76,14 +77,18 @@ def test_D455_Change_Resolution(self,launch_descr_with_parameters): print("Starting camera test...") self.init_test("RsTest"+params['camera_name']) self.wait_for_node(params['camera_name']) - self.create_param_ifs(get_node_heirarchy(params)) - #assert self.set_bool_param('enable_color', False) - assert self.set_string_param('rgb_camera.profile', '640x480x30') + self.create_service_client_ifs(get_node_heirarchy(params)) + self.spin_for_time(0.5) + assert self.set_bool_param('enable_color', False) + self.spin_for_time(0.5) + assert self.set_string_param('rgb_camera.color_profile', '640x480x30') + self.spin_for_time(0.5) assert self.set_bool_param('enable_color', True) + self.spin_for_time(0.5) ret = self.run_test(themes) assert ret[0], ret[1] assert self.process_data(themes) - self.set_string_param('rgb_camera.profile', '1280x800x5') + self.set_string_param('rgb_camera.color_profile', '1280x800x5') self.set_bool_param('enable_color', True) themes[0]['width'] = 1280 themes[0]['height'] = 800 @@ -116,6 +121,7 @@ def test_D455_Seq_ID_update(self,launch_descr_with_parameters): params = launch_descr_with_parameters[1] if pytest_live_camera_utils.check_if_camera_connected(params['device_type']) == False: print("Device not found? : " + params['device_type']) + assert False return try: @@ -125,7 +131,7 @@ def test_D455_Seq_ID_update(self,launch_descr_with_parameters): print("Starting camera test...") self.init_test("RsTest"+params['camera_name']) self.wait_for_node(params['camera_name']) - self.create_param_ifs(get_node_heirarchy(params)) + self.create_service_client_ifs(get_node_heirarchy(params)) assert self.set_bool_param('depth_module.hdr_enabled', False) @@ -150,3 +156,76 @@ def test_D455_Seq_ID_update(self,launch_descr_with_parameters): pytest_rs_utils.kill_realsense2_camera_node() self.shutdown() + + +test_params_reset_device = { + 'camera_name': 'D455', + 'device_type': 'D455', + 'rgb_camera.color_profile': '640x480x30', + } +''' +This test was implemented as a template to set the parameters and run the test. +This directory is not added to the CMakeLists.txt so as to avoid the colcon failure in the +machines that don't have the D455 connected. +1. Only a subset of parameter types are implemented in py_rs_utils, it has to be extended for others +2. After setting the param, rclpy.spin_once may be needed.Test passes even without this though. +''' +@pytest.mark.d455 +@pytest.mark.parametrize("launch_descr_with_parameters", [test_params_reset_device],indirect=True) +@pytest.mark.launch(fixture=launch_descr_with_parameters) +class TestD455_reset_device(pytest_rs_utils.RsTestBaseClass): + def test_D455_Reset_Device(self,launch_descr_with_parameters): + params = launch_descr_with_parameters[1] + if pytest_live_camera_utils.check_if_camera_connected(params['device_type']) == False: + print("Device not found? 
: " + params['device_type']) + assert False + return + + themes = [ + {'topic':get_node_heirarchy(params)+'/color/image_raw', + 'msg_type':msg_Image, + 'expected_data_chunks':1, + 'width':640, + 'height':480, + #'data':data + } + ] + try: + ''' + initialize, run and check the data + ''' + print("Starting camera test...") + self.init_test("RsTest"+params['camera_name']) + self.wait_for_node(params['camera_name']) + self.create_service_client_ifs(get_node_heirarchy(params)) + self.spin_for_time(0.5) + assert self.set_bool_param('enable_color', False) + self.spin_for_time(0.5) + assert self.set_string_param('rgb_camera.color_profile', '640x480x30') + self.spin_for_time(0.5) + assert self.set_bool_param('enable_color', True) + self.spin_for_time(0.5) + ret = self.run_test(themes) + assert ret[0], ret[1] + assert self.process_data(themes) + self.set_string_param('rgb_camera.color_profile', '1280x800x5') + self.set_bool_param('enable_color', True) + themes[0]['width'] = 1280 + themes[0]['height'] = 800 + + ret = self.run_test(themes) + assert ret[0], ret[1] + assert self.process_data(themes) + + self.reset_device() + + themes[0]['width'] = int(params['rgb_camera.color_profile'].split('x')[0]) + themes[0]['height'] = int(params['rgb_camera.color_profile'].split('x')[1]) + ret = self.run_test(themes) + assert ret[0], ret[1] + assert self.process_data(themes) + + finally: + #this step is important because the test will fail next time + pytest_rs_utils.kill_realsense2_camera_node() + self.shutdown() diff --git a/realsense2_camera/test/utils/pytest_live_camera_utils.py b/realsense2_camera/test/utils/pytest_live_camera_utils.py index 9b5bfeac70..ac91620f12 100644 --- a/realsense2_camera/test/utils/pytest_live_camera_utils.py +++ b/realsense2_camera/test/utils/pytest_live_camera_utils.py @@ -24,11 +24,11 @@ def get_profile_config(camera_name): config = { - "Color":{"profile":"rgb_camera.profile", "format":'rgb_camera.color_format', "param":"enable_color", "topic":camera_name+'/color/image_raw',}, - "Depth":{"profile":"depth_module.profile", "format":'depth_module.depth_format', "param":"enable_depth", 'topic':camera_name+'/depth/image_rect_raw'}, - "Infrared":{"profile":"depth_module.profile", "format":'depth_module.infra_format', "param":"enable_infra", 'topic':camera_name+'/infra/image_rect_raw'}, - "Infrared1":{"profile":"depth_module.profile", "format":'depth_module.infra1_format',"param":"enable_infra1", 'topic':camera_name+'/infra/image_rect_raw'}, - "Infrared2":{"profile":"depth_module.profile", "format":'depth_module.infra2_format',"param":"enable_infra2", 'topic':camera_name+'/infra/image_rect_raw'}, + "Color":{"profile":"rgb_camera.color_profile", "format":'rgb_camera.color_format', "param":"enable_color", "topic":camera_name+'/color/image_raw',}, + "Depth":{"profile":"depth_module.depth_profile", "format":'depth_module.depth_format', "param":"enable_depth", 'topic':camera_name+'/depth/image_rect_raw'}, + "Infrared":{"profile":"depth_module.infra_profile", "format":'depth_module.infra_format', "param":"enable_infra", 'topic':camera_name+'/infra/image_rect_raw'}, + "Infrared1":{"profile":"depth_module.infra_profile", "format":'depth_module.infra1_format',"param":"enable_infra1", 'topic':camera_name+'/infra1/image_rect_raw'}, + "Infrared2":{"profile":"depth_module.infra_profile", "format":'depth_module.infra2_format',"param":"enable_infra2", 'topic':camera_name+'/infra2/image_rect_raw'}, } return config @@ -72,20 +72,29 @@ def get_depth_profiles(long_data, start_index, end_index): if 
len(long_data[line_no]) == 0: break debug_print("depth profile processing:" + long_data[line_no]) - depth_profile = long_data[line_no].split() - if len(depth_profile) == 5: - profile = depth_profile[0] - value = depth_profile[1]+"x"+depth_profile[3] - format = depth_profile[4] - elif len(depth_profile) == 6: - profile = depth_profile[0]+depth_profile[1] - value = depth_profile[2]+"x"+depth_profile[4] - format = depth_profile[5] + enumerate_devices_line_splitted = long_data[line_no].split() + if len(enumerate_devices_line_splitted) == 7: + stream0_idx = 1 + stream1_idx = 0 + resolution_idx = 2 + frequency_idx = 5 + format_idx = 3 + elif len(enumerate_devices_line_splitted) == 8: + stream0_idx = 1 + stream1_idx = 2 + resolution_idx = 3 + frequency_idx = 6 + format_idx = 4 else: assert false, "Seems that the depth profile info format printed by rs-enumerate-devices changed" - value = value[:-2] - debug_print("depth profile added: " + profile, value, format) - cap.append([profile, value, format]) + if stream1_idx != 0: + depth_camera_stream = enumerate_devices_line_splitted[stream0_idx]+enumerate_devices_line_splitted[stream1_idx] + else: + depth_camera_stream = enumerate_devices_line_splitted[stream0_idx] + depth_profile_param = enumerate_devices_line_splitted[resolution_idx]+"x"+enumerate_devices_line_splitted[frequency_idx] + depth_format_param = enumerate_devices_line_splitted[format_idx] + debug_print("depth profile added: " + depth_camera_stream, depth_profile_param, depth_format_param) + cap.append([depth_camera_stream, depth_profile_param, depth_format_param]) debug_print(cap) return cap @@ -96,16 +105,19 @@ def get_color_profiles(long_data, start_index, end_index): if len(long_data[line_no]) == 0: break debug_print("color profile processing:" + long_data[line_no]) - color_profile = long_data[line_no].split() - if len(color_profile) == 5: - profile = color_profile[0] - value = color_profile[1]+"x"+color_profile[3] - format = color_profile[4] + enumerate_devices_line_splitted = long_data[line_no].split() + if len(enumerate_devices_line_splitted) == 7: + stream_idx = 1 + resolution_idx = 2 + frequency_idx = 5 + format_idx = 3 else: assert false, "Seems that the color profile info format printed by rs-enumerate-devices changed" - value = value[:-2] - debug_print("color profile added: " + profile, value, format) - cap.append([profile, value, format]) + color_camera_stream = enumerate_devices_line_splitted[stream_idx] + color_profile_param = enumerate_devices_line_splitted[resolution_idx]+"x"+enumerate_devices_line_splitted[frequency_idx] + color_format_param = enumerate_devices_line_splitted[format_idx] + debug_print("color profile added: " + color_camera_stream, color_profile_param, color_format_param) + cap.append([color_camera_stream, color_profile_param, color_format_param]) debug_print(cap) return cap @@ -147,7 +159,7 @@ def parse_device_info(long_data, start_index, end_index, device_type, serial_no) return capability def get_camera_capabilities(device_type, serial_no=None): - long_data = os.popen("rs-enumerate-devices").read().splitlines() + long_data = os.popen("rs-enumerate-devices -v").read().splitlines() debug_print(serial_no) index = 0 while index < len(long_data): @@ -186,11 +198,9 @@ def check_if_camera_connected(device_type, serial_no=None): name_line = long_data[index].split() if name_line[0] != "Intel": continue - if name_line[2] != device_type: + if name_line[2].casefold() != device_type.casefold(): continue - if serial_no == None: - return True - if serial_no == 
name_line[3]: + if serial_no is None or serial_no == name_line[3]: return True return False diff --git a/realsense2_camera/test/utils/pytest_rs_utils.py b/realsense2_camera/test/utils/pytest_rs_utils.py index 0a0069115f..4129ae66c0 100644 --- a/realsense2_camera/test/utils/pytest_rs_utils.py +++ b/realsense2_camera/test/utils/pytest_rs_utils.py @@ -46,6 +46,7 @@ from rcl_interfaces.msg import Parameter from rcl_interfaces.msg import ParameterValue from rcl_interfaces.srv import SetParameters, GetParameters, ListParameters +from std_srvs.srv import Empty ''' humble doesn't have the SetParametersResult and SetParameters_Response imported using __init__.py. The below two lines can be used for iron and hopefully succeeding ROS2 versions @@ -66,6 +67,7 @@ from sensor_msgs.msg import CameraInfo as msg_CameraInfo from realsense2_camera_msgs.msg import Extrinsics as msg_Extrinsics from realsense2_camera_msgs.msg import Metadata as msg_Metadata +from realsense2_camera_msgs.srv import DeviceInfo from sensor_msgs_py import point_cloud2 as pc2 import tf2_ros @@ -783,16 +785,30 @@ def create_subscription(self, msg_type, topic, data_type, store_raw_data=False, else: self.node.reset_data(topic) - - - def create_param_ifs(self, camera_name): + def create_service_client_ifs(self, camera_name): self.set_param_if = self.node.create_client(SetParameters, camera_name + '/set_parameters') self.get_param_if = self.node.create_client(GetParameters, camera_name + '/get_parameters') + self.get_device_info = self.node.create_client(DeviceInfo, camera_name + '/device_info') + self.reset_if = self.node.create_client(Empty, camera_name + '/hw_reset') while not self.get_param_if.wait_for_service(timeout_sec=1.0): print('service not available, waiting again...') while not self.set_param_if.wait_for_service(timeout_sec=1.0): print('service not available, waiting again...') + while not self.get_device_info.wait_for_service(timeout_sec=1.0): + print('service not available, waiting again...') + while not self.reset_if.wait_for_service(timeout_sec=1.0): + print('hw_reset service not available, waiting again...') + + def reset_device(self): + req = Empty.Request() + future = self.reset_if.call_async(req) + while rclpy.ok(): + rclpy.spin_once(self.node) + if future.done(): + return True + return False + def send_param(self, req): future = self.set_param_if.call_async(req) while rclpy.ok(): @@ -806,7 +822,7 @@ def send_param(self, req): print("exception raised:") print(e) pass - return False + return False def get_param(self, req): future = self.get_param_if.call_async(req) @@ -820,7 +836,7 @@ def get_param(self, req): print("exception raised:") print(e) pass - return None + return None def set_string_param(self, param_name, param_value): req = SetParameters.Request() @@ -833,7 +849,16 @@ def set_bool_param(self, param_name, param_value): new_param_value = ParameterValue(type=ParameterType.PARAMETER_BOOL, bool_value=param_value) req.parameters = [Parameter(name=param_name, value=new_param_value)] return self.send_param(req) - + + def get_bool_param(self, param_name): + req = GetParameters.Request() + req.names = [param_name] + value = self.get_param(req) + if (value == None) or (value.type != ParameterType.PARAMETER_BOOL): + return None + else: + return value.bool_value + def set_integer_param(self, param_name, param_value): req = SetParameters.Request() new_param_value = ParameterValue(type=ParameterType.PARAMETER_INTEGER, integer_value=param_value) @@ -844,11 +869,25 @@ def get_integer_param(self, param_name): req = 
GetParameters.Request() req.names = [param_name] value = self.get_param(req) - if (value == None) or (value.type == ParameterType.PARAMETER_NOT_SET): + if (value == None) or (value.type != ParameterType.PARAMETER_INTEGER): return None else: return value.integer_value - + + def get_deviceinfo(self): + self.req = DeviceInfo.Request() + self.future = self.get_device_info.call_async(self.req) + while rclpy.ok(): + rclpy.spin_once(self.node) + if self.future.done(): + try: + response = self.future.result() + return response + except Exception as e: + print("exception raised:") + print(e) + return None + def spin_for_data(self,themes, timeout=5.0): ''' timeout value varies depending upon the system, it needs to be more if diff --git a/realsense2_camera_msgs/CHANGELOG.rst b/realsense2_camera_msgs/CHANGELOG.rst index 5f1c979598..8a9d9a5778 100644 --- a/realsense2_camera_msgs/CHANGELOG.rst +++ b/realsense2_camera_msgs/CHANGELOG.rst @@ -2,6 +2,11 @@ Changelog for package realsense2_camera_msgs ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +4.55.1 (2024-05-28) +------------------- +* PR `#2830 `_ from SamerKhshiboun: Add RGBD + reduce changes between hkr and development +* Contributors: SamerKhshiboun + 4.54.1 (2023-06-27) ------------------- * add info about extrinsic msg format in Extrinsics.msg and README.md diff --git a/realsense2_camera_msgs/CMakeLists.txt b/realsense2_camera_msgs/CMakeLists.txt index 831cab9243..fe14467a3b 100644 --- a/realsense2_camera_msgs/CMakeLists.txt +++ b/realsense2_camera_msgs/CMakeLists.txt @@ -24,6 +24,10 @@ if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") add_compile_options(-Wall -Wextra -Wpedantic) endif() +if(POLICY CMP0148) + cmake_policy(SET CMP0148 OLD) +endif() + find_package(ament_cmake REQUIRED) find_package(rosidl_default_generators REQUIRED) find_package(builtin_interfaces REQUIRED) @@ -39,6 +43,9 @@ set(msg_files rosidl_generate_interfaces(${PROJECT_NAME} ${msg_files} "srv/DeviceInfo.srv" + "srv/CalibConfigRead.srv" + "srv/CalibConfigWrite.srv" + "action/TriggeredCalibration.action" DEPENDENCIES builtin_interfaces std_msgs sensor_msgs ADD_LINTER_TESTS ) diff --git a/realsense2_camera_msgs/action/TriggeredCalibration.action b/realsense2_camera_msgs/action/TriggeredCalibration.action new file mode 100644 index 0000000000..a4884cf171 --- /dev/null +++ b/realsense2_camera_msgs/action/TriggeredCalibration.action @@ -0,0 +1,11 @@ +# request +string json "calib run" +--- +# result +bool success +string error_msg +string calibration +float32 health +--- +# feedback +float32 progress diff --git a/realsense2_camera_msgs/package.xml b/realsense2_camera_msgs/package.xml index a9a3daa9e3..acaa572626 100644 --- a/realsense2_camera_msgs/package.xml +++ b/realsense2_camera_msgs/package.xml @@ -2,7 +2,7 @@ realsense2_camera_msgs - 4.54.1 + 4.55.1 RealSense camera_msgs package containing realsense camera messages definitions LibRealSense ROS Team Apache License 2.0 diff --git a/realsense2_camera_msgs/srv/CalibConfigRead.srv b/realsense2_camera_msgs/srv/CalibConfigRead.srv new file mode 100644 index 0000000000..1f2cf5f041 --- /dev/null +++ b/realsense2_camera_msgs/srv/CalibConfigRead.srv @@ -0,0 +1,4 @@ +--- +bool success +string error_message +string calib_config diff --git a/realsense2_camera_msgs/srv/CalibConfigWrite.srv b/realsense2_camera_msgs/srv/CalibConfigWrite.srv new file mode 100644 index 0000000000..b4d0c99f54 --- /dev/null +++ b/realsense2_camera_msgs/srv/CalibConfigWrite.srv @@ -0,0 +1,4 @@ +string calib_config +--- +bool success +string 
error_message diff --git a/realsense2_description/CHANGELOG.rst b/realsense2_description/CHANGELOG.rst index 6580fcf252..40a0ccae03 100644 --- a/realsense2_description/CHANGELOG.rst +++ b/realsense2_description/CHANGELOG.rst @@ -2,6 +2,14 @@ Changelog for package realsense2_description ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +4.55.1 (2024-05-28) +------------------- +* PR `#2957 `_ from hellototoro: to_urdf fun retrun a str, not a BufferedRandom +* PR `#2953 `_ from Arun-Prasad-V: Added urdf & mesh files for D405 model +* PR `#2841 `_ from SamerKhshiboun: Remove Dashing, Eloquent, Foxy, L500 and SR300 support +* PR `#2817 `_ from karina-ranadive: Replaced Deprecated function mktemp to TemporaryFile +* Contributors: Arun-Prasad-V, karina-ranadive, SamerKhshiboun, hellototoro + 4.54.1 (2023-06-27) ------------------- * Update mesh path diff --git a/realsense2_description/package.xml b/realsense2_description/package.xml index bc9f573032..6104a506a9 100644 --- a/realsense2_description/package.xml +++ b/realsense2_description/package.xml @@ -2,7 +2,7 @@ realsense2_description - 4.54.1 + 4.55.1 RealSense description package for Intel 3D D400 cameras LibRealSense ROS Team Apache License 2.0 diff --git a/scripts/pr_check.sh b/scripts/pr_check.sh index 837be9f058..cea49c9bb5 100755 --- a/scripts/pr_check.sh +++ b/scripts/pr_check.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2023 Intel Corporation. All Rights Reserved. +# Copyright 2024 Intel Corporation. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -29,6 +29,8 @@ fixed=0 license_file=$PWD/../LICENSE +year_format="[[:digit:]][[:digit:]][[:digit:]][[:digit:]]" + function check_folder { for filename in $(find $1 -type f \( -iname \*.cpp -o -iname \*.h -o -iname \*.hpp -o -iname \*.js -o -iname \*.bat -o -iname \*.sh -o -iname \*.txt -o -iname \*.py \)); do @@ -43,7 +45,8 @@ function check_folder { if [[ ! $filename == *"usbhost"* ]]; then # Only check files that are not .gitignore-d if [[ $(git check-ignore $filename | wc -l) -eq 0 ]]; then - if [[ $(grep -oP "Copyright 2023 Intel Corporation. All Rights Reserved" $filename | wc -l) -eq 0 || + if [[ $(grep -oP "Copyright $year_format Intel Corporation. All Rights Reserved" $filename | wc -l) -eq 0 && + $(grep -oP "Copyright $year_format-$year_format Intel Corporation. All Rights Reserved" $filename | wc -l) -eq 0 || $(grep -oP "Licensed under the Apache License, Version 2.0" $filename | wc -l) -eq 0 ]]; then echo "[ERROR] $filename is missing the copyright/license notice" diff --git a/security.md b/security.md new file mode 100644 index 0000000000..d5f1e5eaca --- /dev/null +++ b/security.md @@ -0,0 +1,6 @@ +# Security Policy +Intel is committed to rapidly addressing security vulnerabilities affecting our customers and providing clear guidance on the solution, impact, severity and mitigation. + +## Reporting a Vulnerability +Please report any security vulnerabilities in this project [utilizing the guidelines here](https://www.intel.com/content/www/us/en/security-center/vulnerability-handling-guidelines.html). +
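Usage note (illustrative sketch, not part of the patch): the live-camera tests above reach the wrapper's device_info and hw_reset services through create_service_client_ifs(), get_deviceinfo() and reset_device(). A minimal standalone rclpy client along the same lines is sketched below. The node path '/camera/camera' is an assumption (it depends on the camera_name/namespace used at launch) and may need adjusting for a given setup.

# Illustrative sketch only: query device_info and trigger hw_reset on a running
# realsense2_camera node. Service names follow the clients created in
# pytest_rs_utils.create_service_client_ifs(); the node path is an assumption.
import rclpy
from rclpy.node import Node
from std_srvs.srv import Empty
from realsense2_camera_msgs.srv import DeviceInfo


def call_service(node, client, request, timeout_sec=5.0):
    # Wait for the service, send the request asynchronously and spin until done.
    if not client.wait_for_service(timeout_sec=timeout_sec):
        raise RuntimeError('service not available')
    future = client.call_async(request)
    rclpy.spin_until_future_complete(node, future)
    return future.result()


def main():
    rclpy.init()
    node = Node('rs_service_probe')
    camera = '/camera/camera'  # assumed default node path; adjust to your launch setup
    device_info_client = node.create_client(DeviceInfo, camera + '/device_info')
    hw_reset_client = node.create_client(Empty, camera + '/hw_reset')

    # Device name, firmware version, serial number, etc. are returned in the response.
    info = call_service(node, device_info_client, DeviceInfo.Request())
    print(info)

    # Hardware reset returns an empty response; the node re-enumerates the device afterwards.
    call_service(node, hw_reset_client, Empty.Request())

    node.destroy_node()
    rclpy.shutdown()


if __name__ == '__main__':
    main()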