From 46bfd6b89c618f4c41d01d2d3e8b1d8d9628ef0b Mon Sep 17 00:00:00 2001
From: zhliu <56940908+nv-zhliu@users.noreply.github.com>
Date: Wed, 24 Feb 2021 09:40:08 -0800
Subject: [PATCH] Update to 1.0.2 release
---
HOWTO.md | 8 +-
LICENSE | 2 +-
README.md | 13 +-
THIRD_PARTY_LICENSE | 773 ++++++++++++++++++
apps/README | 17 +-
apps/deepstream-imagedata-multistream/README | 8 +-
.../deepstream_imagedata-multistream.py | 240 +++---
.../dstest_imagedata_config.txt | 6 +-
apps/deepstream-nvdsanalytics/README | 75 ++
.../config_nvdsanalytics.txt | 108 +++
.../config_nvdsanalytics_c02.txt | 98 +++
.../deepstream_nvdsanalytics.py | 447 ++++++++++
.../dsnvanalytics_pgie_config.txt | 81 ++
.../dsnvanalytics_tracker_config.txt | 38 +
apps/deepstream-opticalflow/README | 73 ++
.../deepstream-opticalflow.py | 382 +++++++++
apps/deepstream-segmentation/README | 63 ++
.../deepstream_segmentation.py | 285 +++++++
.../dstest_segmentation_config_industrial.txt | 100 +++
.../dstest_segmentation_config_semantic.txt | 99 +++
apps/deepstream-ssd-parser/README | 4 +-
.../custom_parser_guide.md | 62 +-
.../dstest_ssd_nopostprocess.txt | 2 +-
apps/deepstream-test1-rtsp-out/README | 6 +-
apps/deepstream-test1-usbcam/README | 6 +-
apps/deepstream-test1/README | 4 +-
apps/deepstream-test2/README | 15 +-
apps/deepstream-test2/deepstream_test_2.py | 53 +-
.../dstest2_tracker_config.txt | 7 +-
apps/deepstream-test2/tracker_config.yml | 105 ++-
apps/deepstream-test3/README | 4 +-
apps/deepstream-test3/deepstream_test_3.py | 5 +-
apps/deepstream-test4/README | 4 +-
apps/deepstream-test4/deepstream_test_4.py | 8 +-
34 files changed, 2993 insertions(+), 208 deletions(-)
create mode 100644 THIRD_PARTY_LICENSE
create mode 100644 apps/deepstream-nvdsanalytics/README
create mode 100644 apps/deepstream-nvdsanalytics/config_nvdsanalytics.txt
create mode 100644 apps/deepstream-nvdsanalytics/config_nvdsanalytics_c02.txt
create mode 100644 apps/deepstream-nvdsanalytics/deepstream_nvdsanalytics.py
create mode 100644 apps/deepstream-nvdsanalytics/dsnvanalytics_pgie_config.txt
create mode 100644 apps/deepstream-nvdsanalytics/dsnvanalytics_tracker_config.txt
create mode 100644 apps/deepstream-opticalflow/README
create mode 100644 apps/deepstream-opticalflow/deepstream-opticalflow.py
create mode 100644 apps/deepstream-segmentation/README
create mode 100644 apps/deepstream-segmentation/deepstream_segmentation.py
create mode 100644 apps/deepstream-segmentation/dstest_segmentation_config_industrial.txt
create mode 100644 apps/deepstream-segmentation/dstest_segmentation_config_semantic.txt
diff --git a/HOWTO.md b/HOWTO.md
index bfc7286..b02a8d2 100644
--- a/HOWTO.md
+++ b/HOWTO.md
@@ -16,7 +16,7 @@ This guide provides resources for DeepStream application development in Python.
## Prerequisites
* Ubuntu 18.04
-* [DeepStream SDK 5.0](https://developer.nvidia.com/deepstream-download) or later
+* [DeepStream SDK 5.1](https://developer.nvidia.com/deepstream-download) or later
* Python 3.6
* [Gst Python](https://gstreamer.freedesktop.org/modules/gst-python.html) v1.14.5
@@ -38,11 +38,11 @@ If missing, install with the following steps:
## Running Sample Applications
-Clone the deepstream_python_apps repo under /sources:
+Clone the deepstream_python_apps repo under /sources:
git clone https://github.com/NVIDIA-AI-IOT/deepstream_python_apps
This will create the following directory:
-```/sources/deepstream_python_apps```
+```/sources/deepstream_python_apps```
The Python apps are under the "apps" directory.
Go into each app directory and follow instructions in the README.
@@ -204,5 +204,5 @@ This function populates the input buffer with a timestamp generated according to
## Image Data Access
-Decoded images are accessible as NumPy arrays via the `get_nvds_buf_surface` function. This function is documented in the [API Guide](https://docs.nvidia.com/metropolis/deepstream/5.0/python-api/index.html).
+Decoded images are accessible as NumPy arrays via the `get_nvds_buf_surface` function. This function is documented in the [API Guide](https://docs.nvidia.com/metropolis/deepstream/5.1/python-api/index.html).
Please see the [deepstream-imagedata-multistream](apps/deepstream-imagedata-multistream) sample application for an example of image data usage.
diff --git a/LICENSE b/LICENSE
index d4cf9b2..2ee7876 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
MIT License
-Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
+Copyright (c) 2019 - 2021 NVIDIA CORPORATION. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/README.md b/README.md
index e7a03ca..3ee29f2 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
This repository contains Python bindings and sample applications for the [DeepStream SDK](https://developer.nvidia.com/deepstream-sdk).
-SDK version supported: 5.0
+SDK version supported: 5.1
Download the latest release package complete with bindings and sample applications from the [release section](../../releases).
@@ -45,6 +45,17 @@ We currently provide the following sample applications:
* [deepstream-ssd-parser](apps/deepstream-ssd-parser) -- SSD model inference via Triton server with output parsing in Python
* [deepstream-test1-usbcam](apps/deepstream-test1-usbcam) -- deepstream-test1 pipeline with USB camera input
* [deepstream-test1-rtsp-out](apps/deepstream-test1-rtsp-out) -- deepstream-test1 pipeline with RTSP output
+* [deepstream-opticalflow](apps/deepstream-opticalflow) -- optical flow and visualization pipeline with flow vectors returned in NumPy array
+* [deepstream-segmentation](apps/deepstream-segmentation) -- segmentation and visualization pipeline with segmentation mask returned in NumPy array
+* [deepstream-nvdsanalytics](apps/deepstream-nvdsanalytics) -- multistream pipeline with analytics plugin
+
+Of these applications, the following have been updated or added in this release:
+* deepstream-test2: added option to enable output of past frame tracking data
+* deepstream-test4: callback functions are registered only once to avoid race condition
+* deepstream-imagedata-multistream: the probe function now modifies images in-place in addition to saving copies of them
+* deepstream-opticalflow: new sample application to demonstrate optical flow functionality
+* deepstream-segmentation: new sample application to demonstrate segmentation functionality
+* deepstream-nvdsanalytics: new sample application to demonstrate analytics functionality
Detailed application information is provided in each application's subdirectory under [apps](apps).
diff --git a/THIRD_PARTY_LICENSE b/THIRD_PARTY_LICENSE
new file mode 100644
index 0000000..5875265
--- /dev/null
+++ b/THIRD_PARTY_LICENSE
@@ -0,0 +1,773 @@
+DeepStream Python Apps use third-party packages that may be distributed under
+different licensing terms from DeepStream licenses.
+
+
+-------------------------------------
+PyGObject is licensed under LGPLv2.1+.
+-------------------------------------
+
+GNU LESSER GENERAL PUBLIC LICENSE
+Version 2.1, February 1999
+
+Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL. It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+Preamble
+The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
+
+This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.
+
+When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.
+
+To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.
+
+For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.
+
+We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.
+
+To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others.
+
+Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license.
+
+Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs.
+
+When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library.
+
+We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances.
+
+For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License.
+
+In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system.
+
+Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library.
+
+The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run.
+
+TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables.
+
+The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does.
+
+1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library.
+
+You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
+
+2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
+
+a) The modified work must itself be a software library.
+b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change.
+c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License.
+d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful.
+(For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices.
+
+Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange.
+
+If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code.
+
+5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License.
+
+However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables.
+
+When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law.
+
+If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.)
+
+Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself.
+
+6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things:
+
+a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.)
+b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with.
+c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution.
+d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place.
+e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy.
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute.
+
+7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things:
+
+a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above.
+b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
+8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it.
+
+10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License.
+
+11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
+
+This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation.
+
+14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
+
+NO WARRANTY
+
+15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+
+ Appendix: How to Apply These Terms to Your New Libraries
+
+ If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change. You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms of the
+ordinary General Public License).
+
+ To apply these terms, attach the following notices to the library. It is
+safest to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+
+ Copyright (C) <year> <name of author>
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public
+ License along with this library; if not, write to the Free
+ Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the library, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ library `Frob' (a library for tweaking knobs) written by James Random Hacker.
+
+    <signature of Ty Coon>, 1 April 1990
+ Ty Coon, President of Vice
+
+That's all there is to it!
+-----------------------------------------------
+Python3 packages are released under PSF License
+-----------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and
+ the Individual or Organization ("Licensee") accessing and otherwise using Python
+ 3.6.12 software in source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+ grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+ analyze, test, perform and/or display publicly, prepare derivative works,
+ distribute, and otherwise use Python 3.6.12 alone or in any derivative
+ version, provided, however, that PSF's License Agreement and PSF's notice of
+ copyright, i.e., "Copyright © 2001-2020 Python Software Foundation; All Rights
+ Reserved" are retained in Python 3.6.12 alone or in any derivative version
+ prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on or
+ incorporates Python 3.6.12 or any part thereof, and wants to make the
+ derivative work available to others as provided herein, then Licensee hereby
+ agrees to include in any such work a brief summary of the changes made to Python
+ 3.6.12.
+
+4. PSF is making Python 3.6.12 available to Licensee on an "AS IS" basis.
+ PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF
+ EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR
+ WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE
+ USE OF PYTHON 3.6.12 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 3.6.12
+ FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF
+ MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 3.6.12, OR ANY DERIVATIVE
+ THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material breach of
+ its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any relationship
+ of agency, partnership, or joint venture between PSF and Licensee. This License
+ Agreement does not grant permission to use PSF trademarks or trade name in a
+ trademark sense to endorse or promote products or services of Licensee, or any
+ third party.
+
+8. By copying, installing or otherwise using Python 3.6.12, Licensee agrees
+ to be bound by the terms and conditions of this License Agreement.
+
+
+-----------------------------------------------
+Python-OpenCV is released under the MIT License
+-----------------------------------------------
+MIT License
+
+Copyright (c) Olli-Pekka Heinisuo
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+--------------------------------------------
+NumPy is released under 3-Clause BSD License
+--------------------------------------------
+Copyright (c) 2005 - 2020, NumPy Developers
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+Neither the name of the NumPy Developers nor the names of any contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+--------------------------------
+GStreamer is released under LGPL
+--------------------------------
+
+ GNU LIBRARY GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1991 Free Software Foundation, Inc.
+ 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+[This is the first released version of the library GPL. It is
+ numbered 2 because it goes with version 2 of the ordinary GPL.]
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
+
+ This license, the Library General Public License, applies to some
+specially designated Free Software Foundation software, and to any
+other libraries whose authors decide to use it. You can use it for
+your libraries, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if
+you distribute copies of the library, or if you modify it.
+
+ For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you. You must make sure that they, too, receive or can get the source
+code. If you link a program with the library, you must provide
+complete object files to the recipients so that they can relink them
+with the library, after making changes to the library and recompiling
+it. And you must show them these terms so they know their rights.
+
+ Our method of protecting your rights has two steps: (1) copyright
+the library, and (2) offer you this license which gives you legal
+permission to copy, distribute and/or modify the library.
+
+ Also, for each distributor's protection, we want to make certain
+that everyone understands that there is no warranty for this free
+library. If the library is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original
+version, so that any problems introduced by others will not reflect on
+the original authors' reputations.
+
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that companies distributing free
+software will individually obtain patent licenses, thus in effect
+transforming the program into proprietary software. To prevent this,
+we have made it clear that any patent must be licensed for everyone's
+free use or not licensed at all.
+
+ Most GNU software, including some libraries, is covered by the ordinary
+GNU General Public License, which was designed for utility programs. This
+license, the GNU Library General Public License, applies to certain
+designated libraries. This license is quite different from the ordinary
+one; be sure to read it in full, and don't assume that anything in it is
+the same as in the ordinary license.
+
+ The reason we have a separate public license for some libraries is that
+they blur the distinction we usually make between modifying or adding to a
+program and simply using it. Linking a program with a library, without
+changing the library, is in some sense simply using the library, and is
+analogous to running a utility program or application program. However, in
+a textual and legal sense, the linked executable is a combined work, a
+derivative of the original library, and the ordinary General Public License
+treats it as such.
+
+ Because of this blurred distinction, using the ordinary General
+Public License for libraries did not effectively promote software
+sharing, because most developers did not use the libraries. We
+concluded that weaker conditions might promote sharing better.
+
+ However, unrestricted linking of non-free programs would deprive the
+users of those programs of all benefit from the free status of the
+libraries themselves. This Library General Public License is intended to
+permit developers of non-free programs to use free libraries, while
+preserving your freedom as a user of such programs to change the free
+libraries that are incorporated in them. (We have not seen how to achieve
+this as regards changes in header files, but we have achieved it as regards
+changes in the actual functions of the Library.) The hope is that this
+will lead to faster development of free libraries.
+
+ The precise terms and conditions for copying, distribution and
+modification follow. Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library". The
+former contains code derived from the library, while the latter only
+works together with the library.
+
+ Note that it is possible for a library to be covered by the ordinary
+General Public License rather than by this special one.
+
+
+ GNU LIBRARY GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License Agreement applies to any software library which
+contains a notice placed by the copyright holder or other authorized
+party saying it may be distributed under the terms of this Library
+General Public License (also called "this License"). Each licensee is
+addressed as "you".
+
+ A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+ The "Library", below, refers to any such software library or work
+which has been distributed under these terms. A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language. (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+ "Source code" for a work means the preferred form of the work for
+making modifications to it. For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control compilation
+and installation of the library.
+
+ Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it). Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+ 1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+ You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+
+ 2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) The modified work must itself be a software library.
+
+ b) You must cause the files modified to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ c) You must cause the whole of the work to be licensed at no
+ charge to all third parties under the terms of this License.
+
+ d) If a facility in the modified Library refers to a function or a
+ table of data to be supplied by an application program that uses
+ the facility, other than as an argument passed when the facility
+ is invoked, then you must make a good faith effort to ensure that,
+ in the event an application does not supply such function or
+ table, the facility still operates, and performs whatever part of
+ its purpose remains meaningful.
+
+ (For example, a function in a library to compute square roots has
+ a purpose that is entirely well-defined independent of the
+ application. Therefore, Subsection 2d requires that any
+ application-supplied function or table used by this function must
+ be optional: if the application does not supply it, the square
+ root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library. To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License. (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.) Do not make any other change in
+these notices.
+
+
+ Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+ This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+ 4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+ If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library". Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+ However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library". The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+ When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library. The
+threshold for this to be true is not precisely defined by law.
+
+ If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work. (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+ Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+
+ 6. As an exception to the Sections above, you may also compile or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+ You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License. You must supply a copy of this License. If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License. Also, you must do one
+of these things:
+
+ a) Accompany the work with the complete corresponding
+ machine-readable source code for the Library including whatever
+ changes were used in the work (which must be distributed under
+ Sections 1 and 2 above); and, if the work is an executable linked
+ with the Library, with the complete machine-readable "work that
+ uses the Library", as object code and/or source code, so that the
+ user can modify the Library and then relink to produce a modified
+ executable containing the modified Library. (It is understood
+ that the user who changes the contents of definitions files in the
+ Library will not necessarily be able to recompile the application
+ to use the modified definitions.)
+
+ b) Accompany the work with a written offer, valid for at
+ least three years, to give the same user the materials
+ specified in Subsection 6a, above, for a charge no more
+ than the cost of performing this distribution.
+
+ c) If distribution of the work is made by offering access to copy
+ from a designated place, offer equivalent access to copy the above
+ specified materials from the same place.
+
+ d) Verify that the user has already received a copy of these
+ materials or that you have already sent this user a copy.
+
+ For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it. However, as a special exception,
+the source code distributed need not include anything that is normally
+distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+ It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system. Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+
+ 7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+ a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities. This must be distributed under the terms of the
+ Sections above.
+
+ b) Give prominent notice with the combined library of the fact
+ that part of it is a work based on the Library, and explaining
+ where to find the accompanying uncombined form of the same work.
+
+ 8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License. Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License. However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+ 9. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Library or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+ 10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+
+ 11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all. For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any
+particular circumstance, the balance of the section is intended to apply,
+and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License may add
+an explicit geographical distribution limitation excluding those countries,
+so that distribution is permitted only in or among countries not thus
+excluded. In such case, this License incorporates the limitation as if
+written in the body of this License.
+
+ 13. The Free Software Foundation may publish revised and/or new
+versions of the Library General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation. If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+
+ 14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission. For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this. Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+ NO WARRANTY
+
+ 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+
+ Appendix: How to Apply These Terms to Your New Libraries
+
+ If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change. You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms of the
+ordinary General Public License).
+
+ To apply these terms, attach the following notices to the library. It is
+safest to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+
+    Copyright (C) <year>  <name of author>
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public
+ License along with this library; if not, write to the Free
+ Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the library, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ library `Frob' (a library for tweaking knobs) written by James Random Hacker.
+
+    <signature of Ty Coon>, 1 April 1990
+ Ty Coon, President of Vice
+
+That's all there is to it!
diff --git a/apps/README b/apps/README
index 194be96..21d0524 100644
--- a/apps/README
+++ b/apps/README
@@ -1,5 +1,5 @@
################################################################################
-# Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2019-2021, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
@@ -25,7 +25,7 @@ DeepStream SDK Python Bindings
================================================================================
Setup pre-requisites:
- Ubuntu 18.04
-- NVIDIA DeepStream SDK 5.0 Developer Preview
+- NVIDIA DeepStream SDK 5.1
- Python 3.6
- Gst-python
@@ -55,12 +55,15 @@ The DeepStream Python package includes:
deepstream-ssd-parser
deepstream-test1-rtsp-out
deepstream-test1-usbcam
+ deepstream-opticalflow
+ deepstream-segmentation
+ deepstream-nvdsanalytics
--------------------------------------------------------------------------------
Installing Pre-requisites:
--------------------------------------------------------------------------------
-DeepStream SDK 5.0 Developer Preview
+DeepStream SDK 5.1
--------------------
Download and install from https://developer.nvidia.com/deepstream-download
@@ -78,12 +81,12 @@ $ sudo apt install python3-gi python3-dev python3-gst-1.0 -y
--------------------------------------------------------------------------------
Running the samples
--------------------------------------------------------------------------------
-The apps are configured to work from inside the DeepStream SDK 5.0 installation.
+The apps are configured to work from inside the DeepStream SDK 5.1 installation.
-Clone the deepstream_python_apps repo under <DeepStream 5.0 ROOT>/sources:
+Clone the deepstream_python_apps repo under <DeepStream 5.1 ROOT>/sources:
$ git clone https://github.com/NVIDIA-AI-IOT/deepstream_python_apps
This will create the following directory:
-<DeepStream 5.0 ROOT>/sources/deepstream_python_apps
+<DeepStream 5.1 ROOT>/sources/deepstream_python_apps
Follow README in each app's directory to run the app.
@@ -99,7 +102,7 @@ The general steps are:
Release Notes at https://developer.nvidia.com/deepstream-sdk for more info.
Note that the deepstream-ssd-parser app requires the Triton docker on x86_64.
2. Run the docker with Python Bindings mapped using the following option:
-      -v <path to this repo>:/opt/nvidia/deepstream/deepstream-5.0/sources/python
+      -v <path to this repo>:/opt/nvidia/deepstream/deepstream/sources/python
3. Inside the container, install packages required by all samples:
$ sudo apt update
$ sudo apt install python3-gi python3-dev python3-gst-1.0 -y
diff --git a/apps/deepstream-imagedata-multistream/README b/apps/deepstream-imagedata-multistream/README
index 110456c..1dd9762 100644
--- a/apps/deepstream-imagedata-multistream/README
+++ b/apps/deepstream-imagedata-multistream/README
@@ -1,5 +1,5 @@
################################################################################
-# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2020-2021, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
@@ -21,7 +21,7 @@
################################################################################
Prerequisites:
-- DeepStreamSDK 5.0
+- DeepStreamSDK 5.1
- Python 3.6
- Gst-python
- NumPy package
@@ -42,6 +42,10 @@ This document describes the sample deepstream-imagedata-multistream application.
This sample builds on top of the deepstream-test3 sample to demonstrate how to:
* Access imagedata in a multistream source
+* Modify the images in-place. Changes made to the buffer will reflect in the downstream but
+ color format, resolution and numpy transpose operations are not permitted.
+* Make a copy of the image, modify it and save to a file. These changes are made on the copy
+ of the image and will not be seen downstream.
* Extract the stream metadata, imagedata, which contains useful information about the
frames in the batched buffer.
* Annotating detected objects within certain confidence interval
diff --git a/apps/deepstream-imagedata-multistream/deepstream_imagedata-multistream.py b/apps/deepstream-imagedata-multistream/deepstream_imagedata-multistream.py
index e4918e2..eac299e 100644
--- a/apps/deepstream-imagedata-multistream/deepstream_imagedata-multistream.py
+++ b/apps/deepstream-imagedata-multistream/deepstream_imagedata-multistream.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
################################################################################
-# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2020-2021, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
@@ -23,9 +23,11 @@
################################################################################
import sys
+
sys.path.append('../')
import gi
import configparser
+
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst
from gi.repository import GLib
@@ -43,43 +45,47 @@
import os
import os.path
from os import path
-fps_streams={}
-frame_count={}
-saved_count={}
+
+fps_streams = {}
+frame_count = {}
+saved_count = {}
global PGIE_CLASS_ID_VEHICLE
-PGIE_CLASS_ID_VEHICLE=0
+PGIE_CLASS_ID_VEHICLE = 0
global PGIE_CLASS_ID_PERSON
-PGIE_CLASS_ID_PERSON=2
+PGIE_CLASS_ID_PERSON = 2
-MAX_DISPLAY_LEN=64
+MAX_DISPLAY_LEN = 64
PGIE_CLASS_ID_VEHICLE = 0
PGIE_CLASS_ID_BICYCLE = 1
PGIE_CLASS_ID_PERSON = 2
PGIE_CLASS_ID_ROADSIGN = 3
-MUXER_OUTPUT_WIDTH=1920
-MUXER_OUTPUT_HEIGHT=1080
-MUXER_BATCH_TIMEOUT_USEC=4000000
-TILED_OUTPUT_WIDTH=1920
-TILED_OUTPUT_HEIGHT=1080
-GST_CAPS_FEATURES_NVMM="memory:NVMM"
-pgie_classes_str= ["Vehicle", "TwoWheeler", "Person","RoadSign"]
+MUXER_OUTPUT_WIDTH = 1920
+MUXER_OUTPUT_HEIGHT = 1080
+MUXER_BATCH_TIMEOUT_USEC = 4000000
+TILED_OUTPUT_WIDTH = 1920
+TILED_OUTPUT_HEIGHT = 1080
+GST_CAPS_FEATURES_NVMM = "memory:NVMM"
+pgie_classes_str = ["Vehicle", "TwoWheeler", "Person", "RoadSign"]
+
+MIN_CONFIDENCE = 0.3
+MAX_CONFIDENCE = 0.4
# tiler_sink_pad_buffer_probe will extract metadata received on tiler src pad
# and update params for drawing rectangle, object information etc.
-def tiler_sink_pad_buffer_probe(pad,info,u_data):
- frame_number=0
- num_rects=0
+def tiler_sink_pad_buffer_probe(pad, info, u_data):
+ frame_number = 0
+ num_rects = 0
gst_buffer = info.get_buffer()
if not gst_buffer:
print("Unable to get GstBuffer ")
return
-
+
# Retrieve batch metadata from the gst_buffer
# Note that pyds.gst_buffer_get_nvds_batch_meta() expects the
# C address of gst_buffer as input, which is obtained with hash(gst_buffer)
batch_meta = pyds.gst_buffer_get_nvds_batch_meta(hash(gst_buffer))
-
+
l_frame = batch_meta.frame_meta_list
while l_frame is not None:
try:
@@ -92,155 +98,181 @@ def tiler_sink_pad_buffer_probe(pad,info,u_data):
except StopIteration:
break
- frame_number=frame_meta.frame_num
- l_obj=frame_meta.obj_meta_list
+ frame_number = frame_meta.frame_num
+ l_obj = frame_meta.obj_meta_list
num_rects = frame_meta.num_obj_meta
is_first_obj = True
save_image = False
obj_counter = {
- PGIE_CLASS_ID_VEHICLE:0,
- PGIE_CLASS_ID_PERSON:0,
- PGIE_CLASS_ID_BICYCLE:0,
- PGIE_CLASS_ID_ROADSIGN:0
+ PGIE_CLASS_ID_VEHICLE: 0,
+ PGIE_CLASS_ID_PERSON: 0,
+ PGIE_CLASS_ID_BICYCLE: 0,
+ PGIE_CLASS_ID_ROADSIGN: 0
}
while l_obj is not None:
- try:
+ try:
# Casting l_obj.data to pyds.NvDsObjectMeta
- obj_meta=pyds.NvDsObjectMeta.cast(l_obj.data)
+ obj_meta = pyds.NvDsObjectMeta.cast(l_obj.data)
except StopIteration:
break
obj_counter[obj_meta.class_id] += 1
# Periodically check for objects with borderline confidence value that may be false positive detections.
- # If such detections are found, annoate the frame with bboxes and confidence value.
+ # If such detections are found, annotate the frame with bboxes and confidence value.
# Save the annotated frame to file.
- if((saved_count["stream_"+str(frame_meta.pad_index)]%30==0) and (obj_meta.confidence>0.3 and obj_meta.confidence<0.31)):
+ if saved_count["stream_{}".format(frame_meta.pad_index)] % 30 == 0 and (
+ MIN_CONFIDENCE < obj_meta.confidence < MAX_CONFIDENCE):
if is_first_obj:
is_first_obj = False
# Getting Image data using nvbufsurface
# the input should be address of buffer and batch_id
- n_frame=pyds.get_nvds_buf_surface(hash(gst_buffer),frame_meta.batch_id)
- #convert python array into numy array format.
- frame_image=np.array(n_frame,copy=True,order='C')
- #covert the array into cv2 default color format
- frame_image=cv2.cvtColor(frame_image,cv2.COLOR_RGBA2BGRA)
+ n_frame = pyds.get_nvds_buf_surface(hash(gst_buffer), frame_meta.batch_id)
+ n_frame = draw_bounding_boxes(n_frame, obj_meta, obj_meta.confidence)
+ # convert python array into numpy array format in the copy mode.
+ frame_copy = np.array(n_frame, copy=True, order='C')
+ # convert the array into cv2 default color format
+ frame_copy = cv2.cvtColor(frame_copy, cv2.COLOR_RGBA2BGRA)
+
save_image = True
- frame_image=draw_bounding_boxes(frame_image,obj_meta,obj_meta.confidence)
- try:
- l_obj=l_obj.next
+
+ try:
+ l_obj = l_obj.next
except StopIteration:
break
- print("Frame Number=", frame_number, "Number of Objects=",num_rects,"Vehicle_count=",obj_counter[PGIE_CLASS_ID_VEHICLE],"Person_count=",obj_counter[PGIE_CLASS_ID_PERSON])
+ print("Frame Number=", frame_number, "Number of Objects=", num_rects, "Vehicle_count=",
+ obj_counter[PGIE_CLASS_ID_VEHICLE], "Person_count=", obj_counter[PGIE_CLASS_ID_PERSON])
# Get frame rate through this probe
fps_streams["stream{0}".format(frame_meta.pad_index)].get_fps()
if save_image:
- cv2.imwrite(folder_name+"/stream_"+str(frame_meta.pad_index)+"/frame_"+str(frame_number)+".jpg",frame_image)
- saved_count["stream_"+str(frame_meta.pad_index)]+=1
+ img_path = "{}/stream_{}/frame_{}.jpg".format(folder_name, frame_meta.pad_index, frame_number)
+ cv2.imwrite(img_path, frame_copy)
+ saved_count["stream_{}".format(frame_meta.pad_index)] += 1
try:
- l_frame=l_frame.next
+ l_frame = l_frame.next
except StopIteration:
break
return Gst.PadProbeReturn.OK
-def draw_bounding_boxes(image,obj_meta,confidence):
- confidence='{0:.2f}'.format(confidence)
- rect_params=obj_meta.rect_params
- top=int(rect_params.top)
- left=int(rect_params.left)
- width=int(rect_params.width)
- height=int(rect_params.height)
- obj_name=pgie_classes_str[obj_meta.class_id]
- image=cv2.rectangle(image,(left,top),(left+width,top+height),(0,0,255,0),2)
+
+def draw_bounding_boxes(image, obj_meta, confidence):
+ confidence = '{0:.2f}'.format(confidence)
+ rect_params = obj_meta.rect_params
+ top = int(rect_params.top)
+ left = int(rect_params.left)
+ width = int(rect_params.width)
+ height = int(rect_params.height)
+ obj_name = pgie_classes_str[obj_meta.class_id]
+ # image = cv2.rectangle(image, (left, top), (left + width, top + height), (0, 0, 255, 0), 2, cv2.LINE_4)
+ color = (0, 0, 255, 0)
+ w_percents = int(width * 0.05) if width > 100 else int(width * 0.1)
+ h_percents = int(height * 0.05) if height > 100 else int(height * 0.1)
+ linetop_c1 = (left + w_percents, top)
+ linetop_c2 = (left + width - w_percents, top)
+ image = cv2.line(image, linetop_c1, linetop_c2, color, 6)
+ linebot_c1 = (left + w_percents, top + height)
+ linebot_c2 = (left + width - w_percents, top + height)
+ image = cv2.line(image, linebot_c1, linebot_c2, color, 6)
+ lineleft_c1 = (left, top + h_percents)
+ lineleft_c2 = (left, top + height - h_percents)
+ image = cv2.line(image, lineleft_c1, lineleft_c2, color, 6)
+ lineright_c1 = (left + width, top + h_percents)
+ lineright_c2 = (left + width, top + height - h_percents)
+ image = cv2.line(image, lineright_c1, lineright_c2, color, 6)
# Note that on some systems cv2.putText erroneously draws horizontal lines across the image
- image=cv2.putText(image,obj_name+',C='+str(confidence),(left-10,top-10),cv2.FONT_HERSHEY_SIMPLEX,0.5,(0,0,255,0),2)
+ image = cv2.putText(image, obj_name + ',C=' + str(confidence), (left - 10, top - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5,
+ (0, 0, 255, 0), 2)
return image
-def cb_newpad(decodebin, decoder_src_pad,data):
+
+def cb_newpad(decodebin, decoder_src_pad, data):
print("In cb_newpad\n")
- caps=decoder_src_pad.get_current_caps()
- gststruct=caps.get_structure(0)
- gstname=gststruct.get_name()
- source_bin=data
- features=caps.get_features(0)
+ caps = decoder_src_pad.get_current_caps()
+ gststruct = caps.get_structure(0)
+ gstname = gststruct.get_name()
+ source_bin = data
+ features = caps.get_features(0)
# Need to check if the pad created by the decodebin is for video and not
# audio.
- if(gstname.find("video")!=-1):
+ if (gstname.find("video") != -1):
# Link the decodebin pad only if decodebin has picked nvidia
# decoder plugin nvdec_*. We do this by checking if the pad caps contain
# NVMM memory features.
if features.contains("memory:NVMM"):
# Get the source bin ghost pad
- bin_ghost_pad=source_bin.get_static_pad("src")
+ bin_ghost_pad = source_bin.get_static_pad("src")
if not bin_ghost_pad.set_target(decoder_src_pad):
sys.stderr.write("Failed to link decoder src pad to source bin ghost pad\n")
else:
sys.stderr.write(" Error: Decodebin did not pick nvidia decoder plugin.\n")
-def decodebin_child_added(child_proxy,Object,name,user_data):
+
+def decodebin_child_added(child_proxy, Object, name, user_data):
print("Decodebin child added:", name, "\n")
- if(name.find("decodebin") != -1):
- Object.connect("child-added",decodebin_child_added,user_data)
- if(is_aarch64() and name.find("nvv4l2decoder") != -1):
+ if name.find("decodebin") != -1:
+ Object.connect("child-added", decodebin_child_added, user_data)
+ if is_aarch64() and name.find("nvv4l2decoder") != -1:
print("Seting bufapi_version\n")
- Object.set_property("bufapi-version",True)
+ Object.set_property("bufapi-version", True)
+
-def create_source_bin(index,uri):
+def create_source_bin(index, uri):
print("Creating source bin")
# Create a source GstBin to abstract this bin's content from the rest of the
# pipeline
- bin_name="source-bin-%02d" %index
+ bin_name = "source-bin-%02d" % index
print(bin_name)
- nbin=Gst.Bin.new(bin_name)
+ nbin = Gst.Bin.new(bin_name)
if not nbin:
sys.stderr.write(" Unable to create source bin \n")
# Source element for reading from the uri.
# We will use decodebin and let it figure out the container format of the
# stream and the codec and plug the appropriate demux and decode plugins.
- uri_decode_bin=Gst.ElementFactory.make("uridecodebin", "uri-decode-bin")
+ uri_decode_bin = Gst.ElementFactory.make("uridecodebin", "uri-decode-bin")
if not uri_decode_bin:
sys.stderr.write(" Unable to create uri decode bin \n")
# We set the input uri to the source element
- uri_decode_bin.set_property("uri",uri)
+ uri_decode_bin.set_property("uri", uri)
# Connect to the "pad-added" signal of the decodebin which generates a
# callback once a new pad for raw data has beed created by the decodebin
- uri_decode_bin.connect("pad-added",cb_newpad,nbin)
- uri_decode_bin.connect("child-added",decodebin_child_added,nbin)
+ uri_decode_bin.connect("pad-added", cb_newpad, nbin)
+ uri_decode_bin.connect("child-added", decodebin_child_added, nbin)
# We need to create a ghost pad for the source bin which will act as a proxy
# for the video decoder src pad. The ghost pad will not have a target right
# now. Once the decode bin creates the video decoder and generates the
# cb_newpad callback, we will set the ghost pad target to the video decoder
# src pad.
- Gst.Bin.add(nbin,uri_decode_bin)
- bin_pad=nbin.add_pad(Gst.GhostPad.new_no_target("src",Gst.PadDirection.SRC))
+ Gst.Bin.add(nbin, uri_decode_bin)
+ bin_pad = nbin.add_pad(Gst.GhostPad.new_no_target("src", Gst.PadDirection.SRC))
if not bin_pad:
sys.stderr.write(" Failed to add ghost pad in source bin \n")
return None
return nbin
+
def main(args):
# Check input arguments
if len(args) < 2:
         sys.stderr.write("usage: %s <uri1> [uri2] ... [uriN] <folder to save frames>\n" % args[0])
sys.exit(1)
- for i in range(0,len(args)-2):
- fps_streams["stream{0}".format(i)]=GETFPS(i)
- number_sources=len(args)-2
+ for i in range(0, len(args) - 2):
+ fps_streams["stream{0}".format(i)] = GETFPS(i)
+ number_sources = len(args) - 2
global folder_name
- folder_name=args[-1]
+ folder_name = args[-1]
if path.exists(folder_name):
sys.stderr.write("The output folder %s already exists. Please remove it first.\n" % folder_name)
sys.exit(1)
os.mkdir(folder_name)
- print("Frames will be saved in ",folder_name)
+ print("Frames will be saved in ", folder_name)
# Standard GStreamer initialization
GObject.threads_init()
Gst.init(None)
@@ -262,22 +294,22 @@ def main(args):
pipeline.add(streammux)
for i in range(number_sources):
- os.mkdir(folder_name+"/stream_"+str(i))
- frame_count["stream_"+str(i)]=0
- saved_count["stream_"+str(i)]=0
- print("Creating source_bin ",i," \n ")
- uri_name=args[i+1]
- if uri_name.find("rtsp://") == 0 :
+ os.mkdir(folder_name + "/stream_" + str(i))
+ frame_count["stream_" + str(i)] = 0
+ saved_count["stream_" + str(i)] = 0
+ print("Creating source_bin ", i, " \n ")
+ uri_name = args[i + 1]
+ if uri_name.find("rtsp://") == 0:
is_live = True
- source_bin=create_source_bin(i, uri_name)
+ source_bin = create_source_bin(i, uri_name)
if not source_bin:
sys.stderr.write("Unable to create source bin \n")
pipeline.add(source_bin)
- padname="sink_%u" %i
- sinkpad= streammux.get_request_pad(padname)
+ padname = "sink_%u" % i
+ sinkpad = streammux.get_request_pad(padname)
if not sinkpad:
sys.stderr.write("Unable to create sink pad bin \n")
- srcpad=source_bin.get_static_pad("src")
+ srcpad = source_bin.get_static_pad("src")
if not srcpad:
sys.stderr.write("Unable to create src pad bin \n")
srcpad.link(sinkpad)
@@ -298,7 +330,7 @@ def main(args):
sys.stderr.write(" Unable to get the caps filter1 \n")
filter1.set_property("caps", caps1)
print("Creating tiler \n ")
- tiler=Gst.ElementFactory.make("nvmultistreamtiler", "nvtiler")
+ tiler = Gst.ElementFactory.make("nvmultistreamtiler", "nvtiler")
if not tiler:
sys.stderr.write(" Unable to create tiler \n")
print("Creating nvvidconv \n ")
@@ -309,9 +341,9 @@ def main(args):
nvosd = Gst.ElementFactory.make("nvdsosd", "onscreendisplay")
if not nvosd:
sys.stderr.write(" Unable to create nvosd \n")
- if(is_aarch64()):
+ if (is_aarch64()):
print("Creating transform \n ")
- transform=Gst.ElementFactory.make("nvegltransform", "nvegl-transform")
+ transform = Gst.ElementFactory.make("nvegltransform", "nvegl-transform")
if not transform:
sys.stderr.write(" Unable to create transform \n")
@@ -329,14 +361,15 @@ def main(args):
streammux.set_property('batch-size', number_sources)
streammux.set_property('batched-push-timeout', 4000000)
pgie.set_property('config-file-path', "dstest_imagedata_config.txt")
- pgie_batch_size=pgie.get_property("batch-size")
- if(pgie_batch_size != number_sources):
- print("WARNING: Overriding infer-config batch-size",pgie_batch_size," with number of sources ", number_sources," \n")
- pgie.set_property("batch-size",number_sources)
- tiler_rows=int(math.sqrt(number_sources))
- tiler_columns=int(math.ceil((1.0*number_sources)/tiler_rows))
- tiler.set_property("rows",tiler_rows)
- tiler.set_property("columns",tiler_columns)
+ pgie_batch_size = pgie.get_property("batch-size")
+ if (pgie_batch_size != number_sources):
+ print("WARNING: Overriding infer-config batch-size", pgie_batch_size, " with number of sources ",
+ number_sources, " \n")
+ pgie.set_property("batch-size", number_sources)
+ tiler_rows = int(math.sqrt(number_sources))
+ tiler_columns = int(math.ceil((1.0 * number_sources) / tiler_rows))
+ tiler.set_property("rows", tiler_rows)
+ tiler.set_property("columns", tiler_columns)
tiler.set_property("width", TILED_OUTPUT_WIDTH)
tiler.set_property("height", TILED_OUTPUT_HEIGHT)
@@ -363,7 +396,7 @@ def main(args):
pipeline.add(sink)
print("Linking elements in the Pipeline \n")
- streammux.link(pgie)
+ streammux.link(pgie)
pgie.link(nvvidconv1)
nvvidconv1.link(filter1)
filter1.link(tiler)
@@ -379,9 +412,9 @@ def main(args):
loop = GObject.MainLoop()
bus = pipeline.get_bus()
bus.add_signal_watch()
- bus.connect ("message", bus_call, loop)
+ bus.connect("message", bus_call, loop)
- tiler_sink_pad=tiler.get_static_pad("sink")
+ tiler_sink_pad = tiler.get_static_pad("sink")
if not tiler_sink_pad:
sys.stderr.write(" Unable to get src pad \n")
else:
@@ -390,7 +423,7 @@ def main(args):
# List the sources
print("Now playing...")
for i, source in enumerate(args[:-1]):
- if (i != 0):
+ if i != 0:
print(i, ": ", source)
print("Starting pipeline \n")
@@ -404,5 +437,6 @@ def main(args):
print("Exiting app\n")
pipeline.set_state(Gst.State.NULL)
+
if __name__ == '__main__':
sys.exit(main(sys.argv))
diff --git a/apps/deepstream-imagedata-multistream/dstest_imagedata_config.txt b/apps/deepstream-imagedata-multistream/dstest_imagedata_config.txt
index fc03f21..2c2a086 100644
--- a/apps/deepstream-imagedata-multistream/dstest_imagedata_config.txt
+++ b/apps/deepstream-imagedata-multistream/dstest_imagedata_config.txt
@@ -74,11 +74,11 @@ num-detected-classes=4
interval=0
gie-unique-id=1
output-blob-names=conv2d_bbox;conv2d_cov/Sigmoid
-## 0=Group Rectangles, 1=DBSCAN, 2=NMS, 3 = None(No clustering)
-cluster-mode=1
+## 0=Group Rectangles, 1=DBSCAN, 2=NMS, 3= DBSCAN+NMS Hybrid, 4 = None(No clustering)
+cluster-mode=1
[class-attrs-all]
-threshold=0.2
+pre-cluster-threshold=0.2
eps=0.7
minBoxes=1
diff --git a/apps/deepstream-nvdsanalytics/README b/apps/deepstream-nvdsanalytics/README
new file mode 100644
index 0000000..5ceaf7f
--- /dev/null
+++ b/apps/deepstream-nvdsanalytics/README
@@ -0,0 +1,75 @@
+################################################################################
+# Copyright (c) 2019-2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+
+Prerequisites:
+- DeepStreamSDK 5.1
+- Python 3.6
+- Gst-python
+
+To run:
+  $ python3 deepstream_nvdsanalytics.py <uri1> [uri2] ... [uriN]
+e.g.
+ $ python3 deepstream_nvdsanalytics.py file:///home/ubuntu/video1.mp4 file:///home/ubuntu/video2.mp4
+ $ python3 deepstream_nvdsanalytics.py rtsp://127.0.0.1/video1 rtsp://127.0.0.1/video2
+
+This document describes the sample deepstream-nvdsanalytics application.
+
+This sample builds on top of the deepstream-test3 sample to demonstrate how to:
+
+* Use multiple sources in the pipeline.
+* Use a uridecodebin so that any type of input (e.g. RTSP/File), any GStreamer
+ supported container format, and any codec can be used as input.
+* Configure the stream-muxer to generate a batch of frames and infer on the
+ batch for better resource utilization.
+* Configure the tracker (referred to as nvtracker in this sample) uses
+ config file dsnvanalytics_tracker_config.txt
+* Configure the nvdsanalytics plugin (referred to as nvanalytics in this sample)
+ uses config file config_nvdsanalytics.txt
+* Extract the stream metadata, which contains useful information about the
+ objects and frames in the batched buffer.
+
+This sample accepts one or more H.264/H.265 video streams as input. It creates
+a source bin for each input and connects the bins to an instance of the
+"nvstreammux" element, which forms the batch of frames. The batch of
+frames is fed to "nvinfer" for batched inferencing. The batched buffer is
+used as input for "nvtracker" which adds object tracking, which is then fed into
+"nvdsanalytics" element which runs analytics algorithms on these objects.
+This output is then composited into a 2D tile array using "nvmultistreamtiler."
+The rest of the pipeline is similar to the deepstream-test3 sample.
+
+The "width" and "height" properties must be set on the stream-muxer to set the
+output resolution. If the input frame resolution is different from
+stream-muxer's "width" and "height", the input frame will be scaled to muxer's
+output resolution.
+
+The stream-muxer waits for a user-defined timeout before forming the batch. The
+timeout is set using the "batched-push-timeout" property. If the complete batch
+is formed before the timeout is reached, the batch is pushed to the downstream
+element. If the timeout is reached before the complete batch can be formed
+(which can happen in case of rtsp sources), the batch is formed from the
+available input buffers and pushed. Ideally, the timeout of the stream-muxer
+should be set based on the framerate of the fastest source. It can also be set
+to -1 to make the stream-muxer wait infinitely.
+
+The "nvmultistreamtiler" composite streams based on their stream-ids in
+row-major order (starting from stream 0, left to right across the top row, then
+across the next row, etc.).
diff --git a/apps/deepstream-nvdsanalytics/config_nvdsanalytics.txt b/apps/deepstream-nvdsanalytics/config_nvdsanalytics.txt
new file mode 100644
index 0000000..771f0bf
--- /dev/null
+++ b/apps/deepstream-nvdsanalytics/config_nvdsanalytics.txt
@@ -0,0 +1,108 @@
+################################################################################
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+
+# The values in the config file are overridden by values set through GObject
+# properties.
+
+[property]
+enable=1
+#Width height used for configuration to which below configs are configured
+config-width=1920
+config-height=1080
+#osd-mode 0: Dont display any lines, rois and text
+# 1: Display only lines, rois and static text i.e. labels
+# 2: Display all info from 1 plus information about counts
+osd-mode=2
+#Set OSD font size that has to be displayed
+display-font-size=12
+
+## Per stream configuration
+[roi-filtering-stream-0]
+#enable or disable following feature
+enable=1
+#ROI to filter select objects, and remove from meta data
+roi-RF=295;643;579;634;642;913;56;828
+#remove objects in the ROI
+inverse-roi=0
+class-id=-1
+
+## Per stream configuration
+[roi-filtering-stream-2]
+#enable or disable following feature
+enable=1
+#ROI to filter select objects, and remove from meta data
+roi-RF=295;643;579;634;642;913;56;828
+#remove objects in the ROI
+inverse-roi=1
+class-id=0
+
+[overcrowding-stream-1]
+enable=1
+roi-OC=295;643;579;634;642;913;56;828
+#no of objects that will trigger OC
+object-threshold=2
+class-id=-1
+
+[line-crossing-stream-0]
+enable=1
+#Label;direction;lc
+#line-crossing-Entry=1072;911;1143;1058;944;1020;1297;1020;
+line-crossing-Exit=789;672;1084;900;851;773;1203;732
+class-id=0
+#extended when 0- only counts crossing on the configured Line
+# 1- assumes extended Line crossing counts all the crossing
+extended=0
+#LC modes supported:
+#loose : counts all crossing without strong adherence to direction
+#balanced: Strict direction adherence expected compared to mode=loose
+#strict : Strict direction adherence expected compared to mode=balanced
+mode=loose
+
+[line-crossing-stream-1]
+enable=1
+#Label;direction;lc
+#line-crossing-Entry=1072;911;1143;1058;944;1020;1297;1020;
+line-crossing-Exit=789;672;1084;900;851;773;1203;732
+class-id=0
+#extended when 0- only counts crossing on the configured Line
+# 1- assumes extended Line crossing counts all the crossing
+extended=0
+#LC modes supported:
+#loose : counts all crossing without strong adherence to direction
+#balanced: Strict direction adherence expected compared to mode=loose
+#strict : Strict direction adherence expected compared to mode=balanced
+mode=loose
+
+[direction-detection-stream-0]
+enable=1
+#Label;direction;
+direction-South=284;840;360;662;
+direction-North=1106;622;1312;701;
+class-id=0
+
+[direction-detection-stream-1]
+enable=1
+#Label;direction;
+direction-South=284;840;360;662;
+direction-North=1106;622;1312;701;
+class-id=0
+
diff --git a/apps/deepstream-nvdsanalytics/config_nvdsanalytics_c02.txt b/apps/deepstream-nvdsanalytics/config_nvdsanalytics_c02.txt
new file mode 100644
index 0000000..dd1c849
--- /dev/null
+++ b/apps/deepstream-nvdsanalytics/config_nvdsanalytics_c02.txt
@@ -0,0 +1,98 @@
+################################################################################
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+
+# The values in the config file are overridden by values set through GObject
+# properties.
+
+[property]
+#Width height used for configuration to which below configs are configured
+enable=1
+config-width=1920
+config-height=1080
+
+## Per stream configuration
+[roi-filtering-stream-0]
+#enable or disable following feature
+enable=1
+#ROI to filter select objects, and remove from meta data
+#roi-RF=769;798;1046;772;1204;884;800;951
+roi-C02=673;355;1198;365;1293;708;498;731;
+#roi-RF2=769;798;1046;772;1204;884;800;951
+#remove objects in the ROI
+inverse-roi=0
+class-id=-1
+
+
+
+## Per stream configuration
+[roi-filtering-stream-2]
+#enable or disable following feature
+enable=1
+#ROI to filter select objects, and remove from meta data
+roi-RF4=769;798;1046;772;1204;884;800;951
+#remove objects in the ROI
+inverse-roi=1
+class-id=0
+
+
+
+[overcrowding-stream-1]
+enable=0
+roi-OC=673;355;1198;365;1293;708;498;731;
+#time threshold after which OC event will be triggered
+time-threshold=2000
+#no of objects that will trigger OC
+object-threshold=2
+class-id=-1
+
+[line-crossing-stream-1]
+enable=1
+#Label;direction;lc
+#line-crossing-Entry=772;799;819;946;623;952;1061;926
+line-crossing-Entry=936;464;954;183;646;419;1231;420
+#line-crossing-Exit=0;0;0;0;0;0;0;0;0
+class-id=0
+
+[direction-detection-stream-0]
+enable=1
+#Label;direction;
+#direction-TowardsExit=200;500;200;1000
+#direction-South=400;500;400;1050
+direction-South=250;206;118;306;
+direction-North=379;313;455;218;
+#This will overwrite prev North config.
+#direction-East=600;500;600;1050
+class-id=0
+
+
+[direction-detection-stream-1]
+enable=1
+#Label;direction;
+#direction-TowardsExit=200;500;200;1000
+#direction-South=400;500;400;1050
+direction-South=250;206;118;306;
+direction-North=379;313;455;218;
+#This will overwrite prev North config.
+#direction-East=600;500;600;1050
+class-id=0
+
+
diff --git a/apps/deepstream-nvdsanalytics/deepstream_nvdsanalytics.py b/apps/deepstream-nvdsanalytics/deepstream_nvdsanalytics.py
new file mode 100644
index 0000000..07aec0d
--- /dev/null
+++ b/apps/deepstream-nvdsanalytics/deepstream_nvdsanalytics.py
@@ -0,0 +1,447 @@
+#!/usr/bin/env python3
+
+################################################################################
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+
+import sys
+sys.path.append('../')
+import gi
+import configparser
+gi.require_version('Gst', '1.0')
+from gi.repository import GObject, Gst
+from gi.repository import GLib
+from ctypes import *
+import time
+import sys
+import math
+import platform
+from common.is_aarch_64 import is_aarch64
+from common.bus_call import bus_call
+from common.FPS import GETFPS
+
+import pyds
+
+fps_streams={}
+
+MAX_DISPLAY_LEN=64
+PGIE_CLASS_ID_VEHICLE = 0
+PGIE_CLASS_ID_BICYCLE = 1
+PGIE_CLASS_ID_PERSON = 2
+PGIE_CLASS_ID_ROADSIGN = 3
+MUXER_OUTPUT_WIDTH=1920
+MUXER_OUTPUT_HEIGHT=1080
+MUXER_BATCH_TIMEOUT_USEC=4000000
+TILED_OUTPUT_WIDTH=1280
+TILED_OUTPUT_HEIGHT=720
+GST_CAPS_FEATURES_NVMM="memory:NVMM"
+OSD_PROCESS_MODE= 0
+OSD_DISPLAY_TEXT= 1
+pgie_classes_str= ["Vehicle", "TwoWheeler", "Person","RoadSign"]
+
+# nvanlytics_src_pad_buffer_probe will extract metadata received on nvtiler sink pad
+# and update params for drawing rectangle, object information etc.
+def nvanalytics_src_pad_buffer_probe(pad,info,u_data):
+ frame_number=0
+ num_rects=0
+ gst_buffer = info.get_buffer()
+ if not gst_buffer:
+ print("Unable to get GstBuffer ")
+ return
+
+ # Retrieve batch metadata from the gst_buffer
+ # Note that pyds.gst_buffer_get_nvds_batch_meta() expects the
+ # C address of gst_buffer as input, which is obtained with hash(gst_buffer)
+ batch_meta = pyds.gst_buffer_get_nvds_batch_meta(hash(gst_buffer))
+ l_frame = batch_meta.frame_meta_list
+
+ while l_frame:
+ try:
+ # Note that l_frame.data needs a cast to pyds.NvDsFrameMeta
+ # The casting is done by pyds.NvDsFrameMeta.cast()
+ # The casting also keeps ownership of the underlying memory
+ # in the C code, so the Python garbage collector will leave
+ # it alone.
+ frame_meta = pyds.NvDsFrameMeta.cast(l_frame.data)
+ except StopIteration:
+ break
+
+ frame_number=frame_meta.frame_num
+ l_obj=frame_meta.obj_meta_list
+ num_rects = frame_meta.num_obj_meta
+ obj_counter = {
+ PGIE_CLASS_ID_VEHICLE:0,
+ PGIE_CLASS_ID_PERSON:0,
+ PGIE_CLASS_ID_BICYCLE:0,
+ PGIE_CLASS_ID_ROADSIGN:0
+ }
+ print("#"*50)
+ while l_obj:
+ try:
+ # Note that l_obj.data needs a cast to pyds.NvDsObjectMeta
+ # The casting is done by pyds.NvDsObjectMeta.cast()
+ obj_meta=pyds.NvDsObjectMeta.cast(l_obj.data)
+ except StopIteration:
+ break
+ obj_counter[obj_meta.class_id] += 1
+ l_user_meta = obj_meta.obj_user_meta_list
+ # Extract object level meta data from NvDsAnalyticsObjInfo
+ while l_user_meta:
+ try:
+ user_meta = pyds.NvDsUserMeta.cast(l_user_meta.data)
+ if user_meta.base_meta.meta_type == pyds.nvds_get_user_meta_type("NVIDIA.DSANALYTICSOBJ.USER_META"):
+ user_meta_data = pyds.NvDsAnalyticsObjInfo.cast(user_meta.user_meta_data)
+ if user_meta_data.dirStatus: print("Object {0} moving in direction: {1}".format(obj_meta.object_id, user_meta_data.dirStatus))
+ if user_meta_data.lcStatus: print("Object {0} line crossing status: {1}".format(obj_meta.object_id, user_meta_data.lcStatus))
+ if user_meta_data.ocStatus: print("Object {0} overcrowding status: {1}".format(obj_meta.object_id, user_meta_data.ocStatus))
+ if user_meta_data.roiStatus: print("Object {0} roi status: {1}".format(obj_meta.object_id, user_meta_data.roiStatus))
+ except StopIteration:
+ break
+
+ try:
+ l_user_meta = l_user_meta.next
+ except StopIteration:
+ break
+ try:
+ l_obj=l_obj.next
+ except StopIteration:
+ break
+
+ # Get meta data from NvDsAnalyticsFrameMeta
+ l_user = frame_meta.frame_user_meta_list
+ while l_user:
+ try:
+ user_meta = pyds.NvDsUserMeta.cast(l_user.data)
+ if user_meta.base_meta.meta_type == pyds.nvds_get_user_meta_type("NVIDIA.DSANALYTICSFRAME.USER_META"):
+ user_meta_data = pyds.NvDsAnalyticsFrameMeta.cast(user_meta.user_meta_data)
+ if user_meta_data.objInROIcnt: print("Objs in ROI: {0}".format(user_meta_data.objInROIcnt))
+ if user_meta_data.objLCCumCnt: print("Linecrossing Cumulative: {0}".format(user_meta_data.objLCCumCnt))
+ if user_meta_data.objLCCurrCnt: print("Linecrossing Current Frame: {0}".format(user_meta_data.objLCCurrCnt))
+ if user_meta_data.ocStatus: print("Overcrowding status: {0}".format(user_meta_data.ocStatus))
+ except StopIteration:
+ break
+ try:
+ l_user = l_user.next
+ except StopIteration:
+ break
+
+ print("Frame Number=", frame_number, "stream id=", frame_meta.pad_index, "Number of Objects=",num_rects,"Vehicle_count=",obj_counter[PGIE_CLASS_ID_VEHICLE],"Person_count=",obj_counter[PGIE_CLASS_ID_PERSON])
+ # Get frame rate through this probe
+ fps_streams["stream{0}".format(frame_meta.pad_index)].get_fps()
+ try:
+ l_frame=l_frame.next
+ except StopIteration:
+ break
+ print("#"*50)
+
+ return Gst.PadProbeReturn.OK
+
+
+
+def cb_newpad(decodebin, decoder_src_pad,data):
+ print("In cb_newpad\n")
+ caps=decoder_src_pad.get_current_caps()
+ gststruct=caps.get_structure(0)
+ gstname=gststruct.get_name()
+ source_bin=data
+ features=caps.get_features(0)
+
+ # Need to check if the pad created by the decodebin is for video and not
+ # audio.
+ print("gstname=",gstname)
+ if(gstname.find("video")!=-1):
+ # Link the decodebin pad only if decodebin has picked nvidia
+ # decoder plugin nvdec_*. We do this by checking if the pad caps contain
+ # NVMM memory features.
+ print("features=",features)
+ if features.contains("memory:NVMM"):
+ # Get the source bin ghost pad
+ bin_ghost_pad=source_bin.get_static_pad("src")
+ if not bin_ghost_pad.set_target(decoder_src_pad):
+ sys.stderr.write("Failed to link decoder src pad to source bin ghost pad\n")
+ else:
+ sys.stderr.write(" Error: Decodebin did not pick nvidia decoder plugin.\n")
+
+def decodebin_child_added(child_proxy,Object,name,user_data):
+ print("Decodebin child added:", name, "\n")
+ if(name.find("decodebin") != -1):
+ Object.connect("child-added",decodebin_child_added,user_data)
+ if(is_aarch64() and name.find("nvv4l2decoder") != -1):
+ print("Seting bufapi_version\n")
+ Object.set_property("bufapi-version",True)
+
+def create_source_bin(index,uri):
+ print("Creating source bin")
+
+ # Create a source GstBin to abstract this bin's content from the rest of the
+ # pipeline
+ bin_name="source-bin-%02d" %index
+ print(bin_name)
+ nbin=Gst.Bin.new(bin_name)
+ if not nbin:
+ sys.stderr.write(" Unable to create source bin \n")
+
+ # Source element for reading from the uri.
+ # We will use decodebin and let it figure out the container format of the
+ # stream and the codec and plug the appropriate demux and decode plugins.
+ uri_decode_bin=Gst.ElementFactory.make("uridecodebin", "uri-decode-bin")
+ if not uri_decode_bin:
+ sys.stderr.write(" Unable to create uri decode bin \n")
+ # We set the input uri to the source element
+ uri_decode_bin.set_property("uri",uri)
+ # Connect to the "pad-added" signal of the decodebin which generates a
+    # callback once a new pad for raw data has been created by the decodebin
+ uri_decode_bin.connect("pad-added",cb_newpad,nbin)
+ uri_decode_bin.connect("child-added",decodebin_child_added,nbin)
+
+ # We need to create a ghost pad for the source bin which will act as a proxy
+ # for the video decoder src pad. The ghost pad will not have a target right
+ # now. Once the decode bin creates the video decoder and generates the
+ # cb_newpad callback, we will set the ghost pad target to the video decoder
+ # src pad.
+ Gst.Bin.add(nbin,uri_decode_bin)
+ bin_pad=nbin.add_pad(Gst.GhostPad.new_no_target("src",Gst.PadDirection.SRC))
+ if not bin_pad:
+ sys.stderr.write(" Failed to add ghost pad in source bin \n")
+ return None
+ return nbin
+
+def main(args):
+ # Check input arguments
+ if len(args) < 2:
+        sys.stderr.write("usage: %s <uri1> [uri2] ... [uriN]\n" % args[0])
+ sys.exit(1)
+
+ for i in range(0,len(args)-1):
+ fps_streams["stream{0}".format(i)]=GETFPS(i)
+ number_sources=len(args)-1
+
+ # Standard GStreamer initialization
+ GObject.threads_init()
+ Gst.init(None)
+
+ # Create gstreamer elements */
+ # Create Pipeline element that will form a connection of other elements
+ print("Creating Pipeline \n ")
+ pipeline = Gst.Pipeline()
+ is_live = False
+
+ if not pipeline:
+ sys.stderr.write(" Unable to create Pipeline \n")
+ print("Creating streamux \n ")
+
+ # Create nvstreammux instance to form batches from one or more sources.
+ streammux = Gst.ElementFactory.make("nvstreammux", "Stream-muxer")
+ if not streammux:
+ sys.stderr.write(" Unable to create NvStreamMux \n")
+
+ pipeline.add(streammux)
+ for i in range(number_sources):
+ print("Creating source_bin ",i," \n ")
+ uri_name=args[i+1]
+ if uri_name.find("rtsp://") == 0 :
+ is_live = True
+ source_bin=create_source_bin(i, uri_name)
+ if not source_bin:
+ sys.stderr.write("Unable to create source bin \n")
+ pipeline.add(source_bin)
+ padname="sink_%u" %i
+ sinkpad= streammux.get_request_pad(padname)
+ if not sinkpad:
+ sys.stderr.write("Unable to create sink pad bin \n")
+ srcpad=source_bin.get_static_pad("src")
+ if not srcpad:
+ sys.stderr.write("Unable to create src pad bin \n")
+ srcpad.link(sinkpad)
+ queue1=Gst.ElementFactory.make("queue","queue1")
+ queue2=Gst.ElementFactory.make("queue","queue2")
+ queue3=Gst.ElementFactory.make("queue","queue3")
+ queue4=Gst.ElementFactory.make("queue","queue4")
+ queue5=Gst.ElementFactory.make("queue","queue5")
+ queue6=Gst.ElementFactory.make("queue","queue6")
+ queue7=Gst.ElementFactory.make("queue","queue7")
+ pipeline.add(queue1)
+ pipeline.add(queue2)
+ pipeline.add(queue3)
+ pipeline.add(queue4)
+ pipeline.add(queue5)
+ pipeline.add(queue6)
+ pipeline.add(queue7)
+
+ print("Creating Pgie \n ")
+ pgie = Gst.ElementFactory.make("nvinfer", "primary-inference")
+ if not pgie:
+ sys.stderr.write(" Unable to create pgie \n")
+
+ print("Creating nvtracker \n ")
+ tracker = Gst.ElementFactory.make("nvtracker", "tracker")
+ if not tracker:
+ sys.stderr.write(" Unable to create tracker \n")
+
+ print("Creating nvdsanalytics \n ")
+ nvanalytics = Gst.ElementFactory.make("nvdsanalytics", "analytics")
+ if not nvanalytics:
+ sys.stderr.write(" Unable to create nvanalytics \n")
+ nvanalytics.set_property("config-file", "config_nvdsanalytics.txt")
+
+ print("Creating tiler \n ")
+ tiler=Gst.ElementFactory.make("nvmultistreamtiler", "nvtiler")
+ if not tiler:
+ sys.stderr.write(" Unable to create tiler \n")
+
+ print("Creating nvvidconv \n ")
+ nvvidconv = Gst.ElementFactory.make("nvvideoconvert", "convertor")
+ if not nvvidconv:
+ sys.stderr.write(" Unable to create nvvidconv \n")
+
+ print("Creating nvosd \n ")
+ nvosd = Gst.ElementFactory.make("nvdsosd", "onscreendisplay")
+ if not nvosd:
+ sys.stderr.write(" Unable to create nvosd \n")
+ nvosd.set_property('process-mode',OSD_PROCESS_MODE)
+ nvosd.set_property('display-text',OSD_DISPLAY_TEXT)
+
+ if(is_aarch64()):
+ print("Creating transform \n ")
+ transform=Gst.ElementFactory.make("nvegltransform", "nvegl-transform")
+ if not transform:
+ sys.stderr.write(" Unable to create transform \n")
+
+ print("Creating EGLSink \n")
+ sink = Gst.ElementFactory.make("nveglglessink", "nvvideo-renderer")
+ if not sink:
+ sys.stderr.write(" Unable to create egl sink \n")
+
+ if is_live:
+ print("Atleast one of the sources is live")
+ streammux.set_property('live-source', 1)
+
+ streammux.set_property('width', 1920)
+ streammux.set_property('height', 1080)
+ streammux.set_property('batch-size', number_sources)
+ streammux.set_property('batched-push-timeout', 4000000)
+ pgie.set_property('config-file-path', "dsnvanalytics_pgie_config.txt")
+ pgie_batch_size=pgie.get_property("batch-size")
+ if(pgie_batch_size != number_sources):
+ print("WARNING: Overriding infer-config batch-size",pgie_batch_size," with number of sources ", number_sources," \n")
+ pgie.set_property("batch-size",number_sources)
+ tiler_rows=int(math.sqrt(number_sources))
+ tiler_columns=int(math.ceil((1.0*number_sources)/tiler_rows))
+ tiler.set_property("rows",tiler_rows)
+ tiler.set_property("columns",tiler_columns)
+ tiler.set_property("width", TILED_OUTPUT_WIDTH)
+ tiler.set_property("height", TILED_OUTPUT_HEIGHT)
+ sink.set_property("qos",0)
+
+ #Set properties of tracker
+ config = configparser.ConfigParser()
+ config.read('dsnvanalytics_tracker_config.txt')
+ config.sections()
+
+ for key in config['tracker']:
+ if key == 'tracker-width' :
+ tracker_width = config.getint('tracker', key)
+ tracker.set_property('tracker-width', tracker_width)
+ if key == 'tracker-height' :
+ tracker_height = config.getint('tracker', key)
+ tracker.set_property('tracker-height', tracker_height)
+ if key == 'gpu-id' :
+ tracker_gpu_id = config.getint('tracker', key)
+ tracker.set_property('gpu_id', tracker_gpu_id)
+ if key == 'll-lib-file' :
+ tracker_ll_lib_file = config.get('tracker', key)
+ tracker.set_property('ll-lib-file', tracker_ll_lib_file)
+ if key == 'll-config-file' :
+ tracker_ll_config_file = config.get('tracker', key)
+ tracker.set_property('ll-config-file', tracker_ll_config_file)
+ if key == 'enable-batch-process' :
+ tracker_enable_batch_process = config.getint('tracker', key)
+ tracker.set_property('enable_batch_process', tracker_enable_batch_process)
+ if key == 'enable-past-frame' :
+ tracker_enable_past_frame = config.getint('tracker', key)
+ tracker.set_property('enable_past_frame', tracker_enable_past_frame)
+
+ print("Adding elements to Pipeline \n")
+ pipeline.add(pgie)
+ pipeline.add(tracker)
+ pipeline.add(nvanalytics)
+ pipeline.add(tiler)
+ pipeline.add(nvvidconv)
+ pipeline.add(nvosd)
+
+ if is_aarch64():
+ pipeline.add(transform)
+ pipeline.add(sink)
+
+ # We link elements in the following order:
+ # sourcebin -> streammux -> nvinfer -> nvtracker -> nvdsanalytics ->
+ # nvtiler -> nvvideoconvert -> nvdsosd -> sink
+ print("Linking elements in the Pipeline \n")
+ streammux.link(queue1)
+ queue1.link(pgie)
+ pgie.link(queue2)
+ queue2.link(tracker)
+ tracker.link(queue3)
+ queue3.link(nvanalytics)
+ nvanalytics.link(queue4)
+ queue4.link(tiler)
+ tiler.link(queue5)
+ queue5.link(nvvidconv)
+ nvvidconv.link(queue6)
+ queue6.link(nvosd)
+ if is_aarch64():
+ nvosd.link(queue7)
+ queue7.link(transform)
+ transform.link(sink)
+ else:
+ nvosd.link(queue7)
+ queue7.link(sink)
+
+    # create an event loop and feed gstreamer bus messages to it
+ loop = GObject.MainLoop()
+ bus = pipeline.get_bus()
+ bus.add_signal_watch()
+ bus.connect ("message", bus_call, loop)
+ nvanalytics_src_pad=nvanalytics.get_static_pad("src")
+ if not nvanalytics_src_pad:
+ sys.stderr.write(" Unable to get src pad \n")
+ else:
+ nvanalytics_src_pad.add_probe(Gst.PadProbeType.BUFFER, nvanalytics_src_pad_buffer_probe, 0)
+
+ # List the sources
+ print("Now playing...")
+ for i, source in enumerate(args):
+ if (i != 0):
+ print(i, ": ", source)
+
+ print("Starting pipeline \n")
+    # start playback and listen to events
+ pipeline.set_state(Gst.State.PLAYING)
+ try:
+ loop.run()
+ except:
+ pass
+ # cleanup
+ print("Exiting app\n")
+ pipeline.set_state(Gst.State.NULL)
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/apps/deepstream-nvdsanalytics/dsnvanalytics_pgie_config.txt b/apps/deepstream-nvdsanalytics/dsnvanalytics_pgie_config.txt
new file mode 100644
index 0000000..f86766a
--- /dev/null
+++ b/apps/deepstream-nvdsanalytics/dsnvanalytics_pgie_config.txt
@@ -0,0 +1,81 @@
+################################################################################
+# Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+
+# Following properties are mandatory when engine files are not specified:
+# int8-calib-file(Only in INT8)
+# Caffemodel mandatory properties: model-file, proto-file, output-blob-names
+# UFF: uff-file, input-dims, uff-input-blob-name, output-blob-names
+# ONNX: onnx-file
+#
+# Mandatory properties for detectors:
+# num-detected-classes
+#
+# Optional properties for detectors:
+# cluster-mode(Default=Group Rectangles), interval(Primary mode only, Default=0)
+# custom-lib-path
+# parse-bbox-func-name
+#
+# Mandatory properties for classifiers:
+# classifier-threshold, is-classifier
+#
+# Optional properties for classifiers:
+# classifier-async-mode(Secondary mode only, Default=false)
+#
+# Optional properties in secondary mode:
+# operate-on-gie-id(Default=0), operate-on-class-ids(Defaults to all classes),
+# input-object-min-width, input-object-min-height, input-object-max-width,
+# input-object-max-height
+#
+# Following properties are always recommended:
+# batch-size(Default=1)
+#
+# Other optional properties:
+# net-scale-factor(Default=1), network-mode(Default=0 i.e FP32),
+# model-color-format(Default=0 i.e. RGB) model-engine-file, labelfile-path,
+# mean-file, gie-unique-id(Default=0), offsets, process-mode (Default=1 i.e. primary),
+# custom-lib-path, network-mode(Default=0 i.e FP32)
+#
+# The values in the config file are overridden by values set through GObject
+# properties.
+
+[property]
+gpu-id=0
+net-scale-factor=0.0039215697906911373
+model-file=../../../../samples/models/Primary_Detector/resnet10.caffemodel
+proto-file=../../../../samples/models/Primary_Detector/resnet10.prototxt
+model-engine-file=../../../../samples/models/Primary_Detector/resnet10.caffemodel_b1_gpu0_int8.engine
+labelfile-path=../../../../samples/models/Primary_Detector/labels.txt
+int8-calib-file=../../../../samples/models/Primary_Detector/cal_trt.bin
+force-implicit-batch-dim=1
+batch-size=1
+process-mode=1
+model-color-format=0
+network-mode=1
+num-detected-classes=4
+interval=0
+gie-unique-id=1
+output-blob-names=conv2d_bbox;conv2d_cov/Sigmoid
+
+[class-attrs-all]
+pre-cluster-threshold=0.2
+eps=0.2
+group-threshold=1
diff --git a/apps/deepstream-nvdsanalytics/dsnvanalytics_tracker_config.txt b/apps/deepstream-nvdsanalytics/dsnvanalytics_tracker_config.txt
new file mode 100644
index 0000000..adbf5e5
--- /dev/null
+++ b/apps/deepstream-nvdsanalytics/dsnvanalytics_tracker_config.txt
@@ -0,0 +1,38 @@
+################################################################################
+# Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+
+# Mandatory properties for the tracker:
+# tracker-width
+# tracker-height: needs to be multiple of 6 for NvDCF
+# gpu-id
+# ll-lib-file: path to low-level tracker lib
+# ll-config-file: required for NvDCF, optional for KLT and IOU
+#
+[tracker]
+tracker-width=640
+tracker-height=384
+gpu-id=0
+ll-lib-file=/opt/nvidia/deepstream/deepstream/lib/libnvds_mot_klt.so
+#ll-lib-file=/opt/nvidia/deepstream/deepstream/lib/libnvds_nvdcf.so
+ll-config-file=tracker_config.yml
+#enable-past-frame=1
+enable-batch-process=1
diff --git a/apps/deepstream-opticalflow/README b/apps/deepstream-opticalflow/README
new file mode 100644
index 0000000..59c2127
--- /dev/null
+++ b/apps/deepstream-opticalflow/README
@@ -0,0 +1,73 @@
+################################################################################
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+
+Prerequisites:
+- DeepStreamSDK 5.1
+- Python 3
+- Gst-python
+- NumPy package
+- OpenCV package
+
+To install required packages:
+$ sudo apt update
+$ sudo apt install python3 python3-opencv python3-numpy python3-gst-1.0 -y
+
+To run:
+  $ python3 deepstream-opticalflow.py <uri1> [uri2] ... [uriN]
+e.g.
+ $ python3 deepstream-opticalflow.py file:///opt/nvidia/deepstream/deepstream-5.1/samples/streams/sample_720p.mp4 output
+
+
+This document describes the sample deepstream-nvof-app application.
+
+Optical Flow (nvof gstreamer plugin):
+NVIDIA GPUs, starting with the Nvidia GPU Turing generation and Jetson Xavier generation,
+contain a hardware accelerator for computing optical flow. Optical flow vectors are
+useful in various use cases such as object detection and tracking, video frame rate
+up-conversion, depth estimation, stitching, and so on.
+The gst-nvof plugin collects a pair of NV12 images and passes it to the low-level
+optical flow library. The low-level library returns a map of flow vectors between
+the two frames as its output. The map of flow vectors is encapsulated in an
+NvOpticalFlowMeta structure and is added as a user meta for each frame in the batch
+using nvds_add_user_meta_to_frame() function.
+
+Optical Flow Visualization (nvofvisual gstreamer plugin):
+The Gst-nvofvisual plugin is useful for visualizing motion vector data.
+The visualization method is similar to the OpenCV reference source code in:
+https://github.com/opencv/opencv/blob/master/samples/gpu/optical_flow.cpp
+The plugin solves the optical flow problem by computing the magnitude and direction of
+optical flow from a two-channel array of flow vectors.
+It then visualizes the angle (direction) of flow by hue and the distance (magnitude) of
+flow by value of Hue Saturation Value (HSV) color representation.
+The saturation of HSV is always set to a maximum of 255 for optimal visibility.
+
+This sample creates instance of "nvof" & "nvofvisual" gstreamer elements.
+
+1) nvof element generates the MV (motion vector) data and attaches as
+ user-meta data.
+
+2) nvofvisual element is used for visualizing the MV data using pre-defined
+ color wheel matrix.
+
+3) It then obtains the flow vectors and demonstrates visualization of these
+ flow vectors using OpenCV. The obtained image is different from the visualization
+ plugin output in color, in order to demonstrate the difference.
diff --git a/apps/deepstream-opticalflow/deepstream-opticalflow.py b/apps/deepstream-opticalflow/deepstream-opticalflow.py
new file mode 100644
index 0000000..02dd389
--- /dev/null
+++ b/apps/deepstream-opticalflow/deepstream-opticalflow.py
@@ -0,0 +1,382 @@
+################################################################################
+# Copyright (c) 2020-2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+""" Deepstream optical flow application
+"""
+
+import math
+import os
+from os import path
+import sys
+
+sys.path.append('../')
+import cv2
+import numpy as np
+
+import gi
+
+gi.require_version('Gst', '1.0')
+from gi.repository import GObject, Gst
+
+from common.is_aarch_64 import is_aarch64
+from common.bus_call import bus_call
+import pyds
+
+MAX_DISPLAY_LEN = 64
+MUXER_OUTPUT_WIDTH = 1280
+MUXER_OUTPUT_HEIGHT = 720
+MUXER_BATCH_TIMEOUT_USEC = 3400000
+TILED_OUTPUT_WIDTH = 1280
+TILED_OUTPUT_HEIGHT = 720
+GST_CAPS_FEATURES_NVMM = "memory:NVMM"
+
+
+def visualize_optical_flowvectors(flow):
+ """
+ Converts the flow u, v vectors into visualization by mapping them into
+ grey color space
+ :param flow: flow vectors
+ :return: bgr image
+ """
+ shape_visual = (flow.shape[0], flow.shape[1], 3)
+ mag, ang = cv2.cartToPolar(flow[..., 0], flow[..., 1])
+ hsv = np.full(shape_visual, 255, dtype=np.uint8)
+ hsv[..., 1] = 255
+ hsv[..., 0] = ang * 180 / np.pi / 2
+ hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
+ bgr = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
+ bgr = 255 - bgr
+ return bgr
+
+
+# tiler_sink_pad_buffer_probe will extract metadata received on OSD sink pad
+def ofvisual_queue_src_pad_buffer_probe(pad, info, u_data):
+ got_visual = False
+ frame_number = 0
+ gst_buffer = info.get_buffer()
+ if not gst_buffer:
+ print("Unable to get GstBuffer ")
+ return
+
+ # Retrieve batch metadata from the gst_buffer
+ # Note that pyds.gst_buffer_get_nvds_batch_meta() expects the
+ # C address of gst_buffer as input, which is obtained with hash(gst_buffer)
+ batch_meta = pyds.gst_buffer_get_nvds_batch_meta(hash(gst_buffer))
+ l_frame = batch_meta.frame_meta_list
+ while l_frame is not None:
+ try:
+ # Note that l_frame.data needs a cast to pyds.NvDsFrameMeta
+ # The casting is done by pyds.glist_get_nvds_frame_meta()
+ # The casting also keeps ownership of the underlying memory
+ # in the C code, so the Python garbage collector will leave
+ # it alone.
+ frame_meta = pyds.glist_get_nvds_frame_meta(l_frame.data)
+ except StopIteration:
+ break
+
+ frame_number = frame_meta.frame_num
+ l_user = frame_meta.frame_user_meta_list
+ while l_user is not None:
+ try:
+ # Casting l_user.data to pyds.NvDsUserMeta
+ of_user_meta = pyds.NvDsUserMeta.cast(l_user.data)
+ except StopIteration:
+ break
+ try:
+ # Casting of_user_meta.user_meta_data to pyds.NvDsOpticalFlowMeta
+ of_meta = pyds.NvDsOpticalFlowMeta.cast(of_user_meta.user_meta_data)
+ # Get Flow vectors
+ flow_vectors = pyds.get_optical_flow_vectors(of_meta)
+ # Reshape the obtained flow vectors into proper shape
+ flow_vectors = flow_vectors.reshape(of_meta.rows, of_meta.cols, 2)
+ # map the flow vectors in HSV color space for visualization
+ flow_visual = visualize_optical_flowvectors(flow_vectors)
+ got_visual = True
+ except StopIteration:
+ break
+ try:
+ l_user = l_user.next
+ except StopIteration:
+ break
+
+ print("Frame Number=", frame_number)
+ if got_visual:
+ cv2.imwrite(folder_name + "/stream_" + str(frame_meta.pad_index)
+ + "/frame_" + str(frame_number) + ".jpg", flow_visual)
+ try:
+ l_frame = l_frame.next
+ except StopIteration:
+ break
+
+ return Gst.PadProbeReturn.OK
+
+
+def cb_newpad(decodebin, decoder_src_pad, data):
+ print("In cb_newpad\n")
+ caps = decoder_src_pad.get_current_caps()
+ gststruct = caps.get_structure(0)
+ gstname = gststruct.get_name()
+ source_bin = data
+ features = caps.get_features(0)
+
+ # Need to check if the pad created by the decodebin is for video and not
+ # audio.
+ print("gstname=", gstname)
+ if gstname.find("video") != -1:
+ # Link the decodebin pad only if decodebin has picked nvidia
+ # decoder plugin nvdec_*. We do this by checking if the pad caps contain
+ # NVMM memory features.
+ print("features=", features)
+ if features.contains("memory:NVMM"):
+ # Get the source bin ghost pad
+ bin_ghost_pad = source_bin.get_static_pad("src")
+ if not bin_ghost_pad.set_target(decoder_src_pad):
+ sys.stderr.write("Failed to link decoder src pad to source bin ghost pad\n")
+ else:
+ sys.stderr.write(" Error: Decodebin did not pick nvidia decoder plugin.\n")
+
+
+def decodebin_child_added(child_proxy, Object, name, user_data):
+ print("Decodebin child added:", name, "\n")
+ if name.find("decodebin") != -1:
+ Object.connect("child-added", decodebin_child_added, user_data)
+
+
+def create_source_bin(index, uri):
+ print("Creating source bin")
+
+ # Create a source GstBin to abstract this bin's content from the rest of the
+ # pipeline
+ bin_name = "source-bin-%02d" % index
+ print(bin_name)
+ nbin = Gst.Bin.new(bin_name)
+ if not nbin:
+ sys.stderr.write(" Unable to create source bin \n")
+
+ # Source element for reading from the uri.
+ # We will use decodebin and let it figure out the container format of the
+ # stream and the codec and plug the appropriate demux and decode plugins.
+ uri_decode_bin = Gst.ElementFactory.make("uridecodebin", "uri-decode-bin")
+ if not uri_decode_bin:
+ sys.stderr.write(" Unable to create uri decode bin \n")
+ # We set the input uri to the source element
+ uri_decode_bin.set_property("uri", uri)
+ # Connect to the "pad-added" signal of the decodebin which generates a
+    # callback once a new pad for raw data has been created by the decodebin
+ uri_decode_bin.connect("pad-added", cb_newpad, nbin)
+ uri_decode_bin.connect("child-added", decodebin_child_added, nbin)
+
+ # We need to create a ghost pad for the source bin which will act as a proxy
+ # for the video decoder src pad. The ghost pad will not have a target right
+ # now. Once the decode bin creates the video decoder and generates the
+ # cb_newpad callback, we will set the ghost pad target to the video decoder
+ # src pad.
+ Gst.Bin.add(nbin, uri_decode_bin)
+ bin_pad = nbin.add_pad(Gst.GhostPad.new_no_target("src", Gst.PadDirection.SRC))
+ if not bin_pad:
+ sys.stderr.write(" Failed to add ghost pad in source bin \n")
+ return None
+ return nbin
+
+
+def main(args):
+ # Check input arguments
+ if len(args) < 2:
+        sys.stderr.write("usage: %s <uri1> [uri2] ... [uriN] <folder to save frames>\n" % args[0])
+ sys.exit(1)
+
+ number_sources = len(args) - 2
+ global folder_name
+ folder_name = args[-1]
+ if path.exists(folder_name):
+ sys.stderr.write("The output folder %s already exists. Please remove"
+ " it first.\n" % folder_name)
+ sys.exit(1)
+
+ os.mkdir(folder_name)
+ # Standard GStreamer initialization
+ GObject.threads_init()
+ Gst.init(None)
+
+ # Create gstreamer elements */
+ # Create Pipeline element that will form a connection of other elements
+ print("Creating Pipeline \n ")
+ pipeline = Gst.Pipeline()
+
+ if not pipeline:
+ sys.stderr.write(" Unable to create Pipeline \n")
+ print("Creating streamux \n ")
+
+ # Create nvstreammux instance to form batches from one or more sources.
+ streammux = Gst.ElementFactory.make("nvstreammux", "Stream-muxer")
+ if not streammux:
+ sys.stderr.write(" Unable to create NvStreamMux \n")
+
+ pipeline.add(streammux)
+ for i in range(number_sources):
+ print("Creating source_bin ", i, " \n ")
+ uri_name = args[i + 1]
+ os.mkdir(folder_name + "/stream_" + str(i))
+ source_bin = create_source_bin(i, uri_name)
+ if not source_bin:
+ sys.stderr.write("Unable to create source bin \n")
+ pipeline.add(source_bin)
+ padname = "sink_%u" % i
+ sinkpad = streammux.get_request_pad(padname)
+ if not sinkpad:
+ sys.stderr.write("Unable to create sink pad bin \n")
+ srcpad = source_bin.get_static_pad("src")
+ if not srcpad:
+ sys.stderr.write("Unable to create src pad bin \n")
+ srcpad.link(sinkpad)
+
+ print("Creating tiler \n ")
+ tiler = Gst.ElementFactory.make("nvmultistreamtiler", "nvtiler")
+ if not tiler:
+ sys.stderr.write(" Unable to create tiler \n")
+ print("Creating nv optical flow element \n")
+ nvof = Gst.ElementFactory.make("nvof", "nvopticalflow")
+ if not nvof:
+ sys.stderr.write("Unable to create optical flow \n")
+ print("Creating nv optical flow visualisation element \n")
+ nvofvisual = Gst.ElementFactory.make("nvofvisual", "nvopticalflowvisual")
+ if not nvofvisual:
+ sys.stderr.write("Unable to create flow visualisation element")
+ print("Creating queue \n ")
+ of_queue = Gst.ElementFactory.make("queue", "q_after_of")
+ if not of_queue:
+ sys.stderr.write("Unable to create queue \n")
+ print("Creating queue \n")
+ ofvisual_queue = Gst.ElementFactory.make("queue", "q_after_ofvisual")
+ if not ofvisual_queue:
+ sys.stderr.write("Unable to create queue \n")
+
+ print("Creating Queue \n")
+ queue = Gst.ElementFactory.make("queue", "queue")
+ if not queue:
+ sys.stderr.write(" Unable to create queue \n")
+ print("Creating nvosd \n ")
+ nvosd = Gst.ElementFactory.make("nvdsosd", "onscreendisplay")
+ if not nvosd:
+ sys.stderr.write(" Unable to create nvosd \n")
+ print("Creating converter 2\n")
+ nvvidconv2 = Gst.ElementFactory.make("nvvideoconvert", "convertor2")
+ if not nvvidconv2:
+ sys.stderr.write(" Unable to create nvvidconv2 \n")
+ print("Creating capsfilter \n")
+ capsfilter = Gst.ElementFactory.make("capsfilter", "capsfilter")
+ if not capsfilter:
+ sys.stderr.write(" Unable to create capsfilter \n")
+ caps = Gst.Caps.from_string("video/x-raw, format=I420")
+ capsfilter.set_property("caps", caps)
+ print("Creating Encoder \n")
+ encoder = Gst.ElementFactory.make("avenc_mpeg4", "encoder")
+ if not encoder:
+ sys.stderr.write(" Unable to create encoder \n")
+ encoder.set_property("bitrate", 2000000)
+ print("Creating Code Parser \n")
+ codeparser = Gst.ElementFactory.make("mpeg4videoparse", "mpeg4-parser")
+ if not codeparser:
+ sys.stderr.write(" Unable to create code parser \n")
+ print("Creating Container \n")
+ container = Gst.ElementFactory.make("qtmux", "qtmux")
+ if not container:
+ sys.stderr.write(" Unable to create code parser \n")
+ print("Creating File Sink \n")
+ sink = Gst.ElementFactory.make("filesink", "filesink")
+ if not sink:
+ sys.stderr.write(" Unable to create file sink \n")
+ sink.set_property("location", "./out.mp4")
+ sink.set_property("sync", 0)
+ streammux.set_property('width', 1280)
+ streammux.set_property('height', 720)
+ streammux.set_property('batch-size', number_sources)
+ streammux.set_property('batched-push-timeout', 4000000)
+ tiler_rows = int(math.sqrt(number_sources))
+ tiler_columns = int(math.ceil((1.0 * number_sources) / tiler_rows))
+ tiler.set_property("rows", tiler_rows)
+ tiler.set_property("columns", tiler_columns)
+ tiler.set_property("width", TILED_OUTPUT_WIDTH)
+ tiler.set_property("height", TILED_OUTPUT_HEIGHT)
+
+ print("Adding elements to Pipeline \n")
+ pipeline.add(nvof)
+ pipeline.add(of_queue)
+ pipeline.add(nvofvisual)
+ pipeline.add(ofvisual_queue)
+ pipeline.add(tiler)
+ pipeline.add(nvosd)
+ pipeline.add(queue)
+ pipeline.add(nvvidconv2)
+ pipeline.add(capsfilter)
+ pipeline.add(encoder)
+ pipeline.add(codeparser)
+ pipeline.add(container)
+ pipeline.add(sink)
+
+ print("Linking elements in the Pipeline \n")
+ streammux.link(nvof)
+ nvof.link(of_queue)
+ of_queue.link(nvofvisual)
+ nvofvisual.link(ofvisual_queue)
+ ofvisual_queue.link(tiler)
+ tiler.link(nvosd)
+ nvosd.link(queue)
+ queue.link(nvvidconv2)
+ nvvidconv2.link(capsfilter)
+ capsfilter.link(encoder)
+ encoder.link(codeparser)
+ codeparser.link(container)
+ container.link(sink)
+
+    # create an event loop and feed gstreamer bus messages to it
+ loop = GObject.MainLoop()
+ bus = pipeline.get_bus()
+ bus.add_signal_watch()
+ bus.connect("message", bus_call, loop)
+ ofvisual_queue_src_pad = ofvisual_queue.get_static_pad("src")
+ if not ofvisual_queue_src_pad:
+ sys.stderr.write(" Unable to get src pad \n")
+ else:
+ ofvisual_queue_src_pad.add_probe(Gst.PadProbeType.BUFFER,
+ ofvisual_queue_src_pad_buffer_probe,
+ 0)
+
+ # List the sources
+ print("Now playing...")
+ for i, source in enumerate(args[:-1]):
+ if i != 0:
+ print(i, ": ", source)
+
+ print("Starting pipeline \n")
+    # start playback and listen to events
+ pipeline.set_state(Gst.State.PLAYING)
+ try:
+ loop.run()
+ except:
+ pass
+ # cleanup
+ print("Exiting app\n")
+ pipeline.set_state(Gst.State.NULL)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/apps/deepstream-segmentation/README b/apps/deepstream-segmentation/README
new file mode 100644
index 0000000..aae912d
--- /dev/null
+++ b/apps/deepstream-segmentation/README
@@ -0,0 +1,63 @@
+################################################################################
+# Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+
+Prerequisites:
+- DeepStreamSDK 5.1
+- Python 3.6
+- Gst-python
+- NumPy package
+- OpenCV package
+
+To install required packages:
+$ sudo apt update
+$ sudo apt install python3-numpy python3-opencv -y
+
+To run:
+ $ python3 deepstream_segmentation.py <config_file> <jpeg/mjpeg file> <path to save seg images>
+
+This document shall describe the sample deepstream-segmentation application.
+
+It is meant for simple demonstration of how to use the various DeepStream SDK
+elements in the pipeline and extract meaningful insights from a video stream such
+as segmentation masks and respective color mapping for segmentation visualization.
+
+This sample creates instance of "nvinfer" element. Instance of
+the "nvinfer" uses TensorRT API to execute inferencing on a model. Using a
+correct configuration for a nvinfer element instance is therefore very
+important as considerable behaviors of the instance are parameterized
+through these configs.
+
+For reference, here are the config files used for this sample :
+1. The 4-class segmentation model configured through dstest_segmentation_config_semantic.txt
+2. The 2-class segmentation model configured through dstest_segmentation_config_industrial.txt
+
+In this sample, we first create one instance of "nvinfer", referred to as the pgie.
+For semantic segmentation, it needs a semantic model which can produce a 4-class map,
+including background, car, person, and bicycle. Then the "nvsegvisual" plugin chooses 4 different
+colors for them to display. Similarly, for industrial segmentation, it needs an industrial
+model which can only produce a defective-area map. Later, the nvinfer element attaches some MetaData
+to the buffer. By attaching the probe function at the end of the pipeline, one can
+extract meaningful information from this inference. Please refer
+the "tiler_src_pad_buffer_probe" function in the sample code. For details on the
+Metadata format, refer to the file "gstnvdsmeta.h". In this probe we demonstrate
+extracting the masks and color mapping for segmentation visualization using opencv
+and numpy.
diff --git a/apps/deepstream-segmentation/deepstream_segmentation.py b/apps/deepstream-segmentation/deepstream_segmentation.py
new file mode 100644
index 0000000..51b2b1c
--- /dev/null
+++ b/apps/deepstream-segmentation/deepstream_segmentation.py
@@ -0,0 +1,285 @@
+#!/usr/bin/env python3
+
+################################################################################
+# Copyright (c) 2020-2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+
+import sys
+
+sys.path.append('../')
+import gi
+import math
+
+gi.require_version('Gst', '1.0')
+from gi.repository import GObject, Gst
+from common.is_aarch_64 import is_aarch64
+from common.bus_call import bus_call
+import cv2
+import pyds
+import numpy as np
+import os.path
+from os import path
+
+MAX_DISPLAY_LEN = 64
+MUXER_OUTPUT_WIDTH = 1920
+MUXER_OUTPUT_HEIGHT = 1080
+MUXER_BATCH_TIMEOUT_USEC = 4000000
+TILED_OUTPUT_WIDTH = 1280
+TILED_OUTPUT_HEIGHT = 720
+COLORS = [[128, 128, 64], [0, 0, 128], [0, 128, 128], [128, 0, 0],
+ [128, 0, 128], [128, 128, 0], [0, 128, 0], [0, 0, 64],
+ [0, 0, 192], [0, 128, 64], [0, 128, 192], [128, 0, 64],
+ [128, 0, 192], [128, 128, 128]]
+
+
+def map_mask_as_display_bgr(mask):
+ """ Assigning multiple colors as image output using the information
+ contained in mask. (BGR is opencv standard.)
+ """
+ # getting a list of available classes
+ m_list = list(set(mask.flatten()))
+
+ shp = mask.shape
+ bgr = np.zeros((shp[0], shp[1], 3))
+ for idx in m_list:
+ bgr[mask == idx] = COLORS[idx]
+ return bgr
+
+
+def seg_src_pad_buffer_probe(pad, info, u_data):
+ gst_buffer = info.get_buffer()
+ if not gst_buffer:
+ print("Unable to get GstBuffer ")
+ return
+
+ # Retrieve batch metadata from the gst_buffer
+ # Note that pyds.gst_buffer_get_nvds_batch_meta() expects the
+ # C address of gst_buffer as input, which is obtained with hash(gst_buffer)
+ batch_meta = pyds.gst_buffer_get_nvds_batch_meta(hash(gst_buffer))
+ l_frame = batch_meta.frame_meta_list
+ while l_frame is not None:
+ try:
+ # Note that l_frame.data needs a cast to pyds.NvDsFrameMeta
+ # The casting is done by pyds.NvDsFrameMeta.cast()
+ # The casting also keeps ownership of the underlying memory
+ # in the C code, so the Python garbage collector will leave
+ # it alone.
+ frame_meta = pyds.NvDsFrameMeta.cast(l_frame.data)
+ except StopIteration:
+ break
+ frame_number = frame_meta.frame_num
+ l_user = frame_meta.frame_user_meta_list
+ while l_user is not None:
+ try:
+ # Note that l_user.data needs a cast to pyds.NvDsUserMeta
+ # The casting is done by pyds.NvDsUserMeta.cast()
+ # The casting also keeps ownership of the underlying memory
+ # in the C code, so the Python garbage collector will leave
+ # it alone.
+ seg_user_meta = pyds.NvDsUserMeta.cast(l_user.data)
+ except StopIteration:
+ break
+ if seg_user_meta and seg_user_meta.base_meta.meta_type == \
+ pyds.NVDSINFER_SEGMENTATION_META:
+ try:
+ # Note that seg_user_meta.user_meta_data needs a cast to
+ # pyds.NvDsInferSegmentationMeta
+ # The casting is done by pyds.NvDsInferSegmentationMeta.cast()
+ # The casting also keeps ownership of the underlying memory
+ # in the C code, so the Python garbage collector will leave
+ # it alone.
+ segmeta = pyds.NvDsInferSegmentationMeta.cast(seg_user_meta.user_meta_data)
+ except StopIteration:
+ break
+ # Retrieve mask data in the numpy format from segmeta
+ # Note that pyds.get_segmentation_masks() expects object of
+ # type NvDsInferSegmentationMeta
+ masks = pyds.get_segmentation_masks(segmeta)
+ masks = np.array(masks, copy=True, order='C')
+ # map the obtained masks to colors of 2 classes.
+ frame_image = map_mask_as_display_bgr(masks)
+ cv2.imwrite(folder_name + "/" + str(frame_number) + ".jpg", frame_image)
+ try:
+ l_user = l_user.next
+ except StopIteration:
+ break
+ try:
+ l_frame = l_frame.next
+ except StopIteration:
+ break
+ return Gst.PadProbeReturn.OK
+
+
+def main(args):
+ # Check input arguments
+ if len(args) != 4:
+        sys.stderr.write("usage: %s config_file <jpeg/mjpeg file> "
+                         "<path to save seg images>\n" % args[0])
+ sys.exit(1)
+
+ global folder_name
+ folder_name = args[-1]
+ if path.exists(folder_name):
+ sys.stderr.write("The output folder %s already exists. "
+ "Please remove it first.\n" % folder_name)
+ sys.exit(1)
+ os.mkdir(folder_name)
+
+ config_file = args[1]
+ num_sources = len(args) - 3
+ # Standard GStreamer initialization
+ GObject.threads_init()
+ Gst.init(None)
+
+ # Create gstreamer elements
+ # Create Pipeline element that will form a connection of other elements
+ print("Creating Pipeline \n ")
+ pipeline = Gst.Pipeline()
+
+ if not pipeline:
+ sys.stderr.write(" Unable to create Pipeline \n")
+
+ # Source element for reading from the file
+ print("Creating Source \n ")
+ source = Gst.ElementFactory.make("filesrc", "file-source")
+ if not source:
+ sys.stderr.write(" Unable to create Source \n")
+
+ # Since the data format in the input file is jpeg,
+ # we need a jpegparser
+ print("Creating jpegParser \n")
+ jpegparser = Gst.ElementFactory.make("jpegparse", "jpeg-parser")
+ if not jpegparser:
+ sys.stderr.write("Unable to create jpegparser \n")
+
+ # Use nvdec for hardware accelerated decode on GPU
+ print("Creating Decoder \n")
+ decoder = Gst.ElementFactory.make("nvv4l2decoder", "nvv4l2-decoder")
+ if not decoder:
+ sys.stderr.write(" Unable to create Nvv4l2 Decoder \n")
+
+ # Create nvstreammux instance to form batches from one or more sources.
+ streammux = Gst.ElementFactory.make("nvstreammux", "Stream-muxer")
+ if not streammux:
+ sys.stderr.write(" Unable to create NvStreamMux \n")
+
+ # Create segmentation for primary inference
+ seg = Gst.ElementFactory.make("nvinfer", "primary-nvinference-engine")
+ if not seg:
+ sys.stderr.write("Unable to create primary inferene\n")
+
+ # Create nvsegvisual for visualizing segmentation
+ nvsegvisual = Gst.ElementFactory.make("nvsegvisual", "nvsegvisual")
+ if not nvsegvisual:
+ sys.stderr.write("Unable to create nvsegvisual\n")
+
+ if is_aarch64():
+ transform = Gst.ElementFactory.make("nvegltransform", "nvegl-transform")
+
+ print("Creating EGLSink \n")
+ sink = Gst.ElementFactory.make("nveglglessink", "nvvideo-renderer")
+ if not sink:
+ sys.stderr.write(" Unable to create egl sink \n")
+
+ print("Playing file %s " % args[2])
+ source.set_property('location', args[2])
+ if is_aarch64() and (args[2].endswith("mjpeg") or args[2].endswith("mjpg")):
+ decoder.set_property('mjpeg', 1)
+ streammux.set_property('width', 1920)
+ streammux.set_property('height', 1080)
+ streammux.set_property('batch-size', 1)
+ streammux.set_property('batched-push-timeout', 4000000)
+ seg.set_property('config-file-path', config_file)
+ pgie_batch_size = seg.get_property("batch-size")
+ if pgie_batch_size != num_sources:
+ print("WARNING: Overriding infer-config batch-size", pgie_batch_size,
+ " with number of sources ", num_sources,
+ " \n")
+ seg.set_property("batch-size", num_sources)
+ nvsegvisual.set_property('batch-size', num_sources)
+ nvsegvisual.set_property('width', 512)
+ nvsegvisual.set_property('height', 512)
+ sink.set_property("qos", 0)
+ print("Adding elements to Pipeline \n")
+ pipeline.add(source)
+ pipeline.add(jpegparser)
+ pipeline.add(decoder)
+ pipeline.add(streammux)
+ pipeline.add(seg)
+ pipeline.add(nvsegvisual)
+ pipeline.add(sink)
+ if is_aarch64():
+ pipeline.add(transform)
+
+ # we link the elements together
+ # file-source -> jpeg-parser -> nvv4l2-decoder ->
+ # nvinfer -> nvsegvisual -> sink
+ print("Linking elements in the Pipeline \n")
+ source.link(jpegparser)
+ jpegparser.link(decoder)
+
+ sinkpad = streammux.get_request_pad("sink_0")
+ if not sinkpad:
+ sys.stderr.write(" Unable to get the sink pad of streammux \n")
+ srcpad = decoder.get_static_pad("src")
+ if not srcpad:
+ sys.stderr.write(" Unable to get source pad of decoder \n")
+ srcpad.link(sinkpad)
+ streammux.link(seg)
+ seg.link(nvsegvisual)
+ if is_aarch64():
+ nvsegvisual.link(transform)
+ transform.link(sink)
+ else:
+ nvsegvisual.link(sink)
+    # create an event loop and feed gstreamer bus messages to it
+ loop = GObject.MainLoop()
+ bus = pipeline.get_bus()
+ bus.add_signal_watch()
+ bus.connect("message", bus_call, loop)
+
+    # Let's add a probe to get informed of the meta data generated; we add
+    # the probe to the src pad of the inference element
+ seg_src_pad = seg.get_static_pad("src")
+ if not seg_src_pad:
+ sys.stderr.write(" Unable to get src pad \n")
+ else:
+ seg_src_pad.add_probe(Gst.PadProbeType.BUFFER, seg_src_pad_buffer_probe, 0)
+
+ # List the sources
+ print("Now playing...")
+ for i, source in enumerate(args[1:-1]):
+ if i != 0:
+ print(i, ": ", source)
+
+ print("Starting pipeline \n")
+    # start playback and listen to events
+ pipeline.set_state(Gst.State.PLAYING)
+ try:
+ loop.run()
+ except:
+ pass
+ # cleanup
+ pipeline.set_state(Gst.State.NULL)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/apps/deepstream-segmentation/dstest_segmentation_config_industrial.txt b/apps/deepstream-segmentation/dstest_segmentation_config_industrial.txt
new file mode 100644
index 0000000..b1062d9
--- /dev/null
+++ b/apps/deepstream-segmentation/dstest_segmentation_config_industrial.txt
@@ -0,0 +1,100 @@
+################################################################################
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+
+# Following properties are mandatory when engine files are not specified:
+# int8-calib-file(Only in INT8), model-file-format
+# Caffemodel mandatory properties: model-file, proto-file, output-blob-names
+# UFF: uff-file, input-dims, uff-input-blob-name, output-blob-names
+# ONNX: onnx-file
+#
+# Mandatory properties for detectors:
+# num-detected-classes
+#
+# Optional properties for detectors:
+# cluster-mode(Default=Group Rectangles), interval(Primary mode only, Default=0)
+# custom-lib-path,
+# parse-bbox-func-name
+#
+# Mandatory properties for classifiers:
+# classifier-threshold, is-classifier
+#
+# Optional properties for classifiers:
+# classifier-async-mode(Secondary mode only, Default=false)
+#
+# Optional properties in secondary mode:
+# operate-on-gie-id(Default=0), operate-on-class-ids(Defaults to all classes),
+# input-object-min-width, input-object-min-height, input-object-max-width,
+# input-object-max-height
+#
+# Following properties are always recommended:
+# batch-size(Default=1)
+#
+# Other optional properties:
+# net-scale-factor(Default=1), network-mode(Default=0 i.e FP32),
+# model-color-format(Default=0 i.e. RGB) model-engine-file, labelfile-path,
+# mean-file, gie-unique-id(Default=0), offsets, process-mode (Default=1 i.e. primary),
+# custom-lib-path, network-mode(Default=0 i.e FP32)
+#
+# The values in the config file are overridden by values set through GObject
+# properties.
+
+[property]
+gpu-id=0
+net-scale-factor=0.003921568627451
+model-color-format=2
+uff-file=../../../../samples/models/Segmentation/industrial/unet_output_graph.uff
+model-engine-file=../../../../samples/models/Segmentation/industrial/unet_output_graph.uff_b1_gpu0_fp32.engine
+infer-dims=1;512;512
+uff-input-order=0
+uff-input-blob-name=input_1
+batch-size=1
+## 0=FP32, 1=INT8, 2=FP16 mode
+network-mode=0
+num-detected-classes=1
+interval=0
+gie-unique-id=1
+network-type=2
+output-blob-names=conv2d_19/Sigmoid
+segmentation-threshold=0.5
+#parse-bbox-func-name=NvDsInferParseCustomSSD
+#custom-lib-path=nvdsinfer_custom_impl_ssd/libnvdsinfer_custom_impl_ssd.so
+#scaling-filter=0
+#scaling-compute-hw=0
+
+[class-attrs-all]
+pre-cluster-threshold=0.5
+roi-top-offset=0
+roi-bottom-offset=0
+detected-min-w=0
+detected-min-h=0
+detected-max-w=0
+detected-max-h=0
+
+## Per class configuration
+#[class-attrs-2]
+#threshold=0.6
+#roi-top-offset=20
+#roi-bottom-offset=10
+#detected-min-w=40
+#detected-min-h=40
+#detected-max-w=400
+#detected-max-h=800
diff --git a/apps/deepstream-segmentation/dstest_segmentation_config_semantic.txt b/apps/deepstream-segmentation/dstest_segmentation_config_semantic.txt
new file mode 100644
index 0000000..2f96895
--- /dev/null
+++ b/apps/deepstream-segmentation/dstest_segmentation_config_semantic.txt
@@ -0,0 +1,99 @@
+################################################################################
+# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+
+# Following properties are mandatory when engine files are not specified:
+# int8-calib-file(Only in INT8), model-file-format
+# Caffemodel mandatory properties: model-file, proto-file, output-blob-names
+# UFF: uff-file, input-dims, uff-input-blob-name, output-blob-names
+# ONNX: onnx-file
+#
+# Mandatory properties for detectors:
+# num-detected-classes
+#
+# Optional properties for detectors:
+# cluster-mode(Default=Group Rectangles), interval(Primary mode only, Default=0)
+# custom-lib-path,
+# parse-bbox-func-name
+#
+# Mandatory properties for classifiers:
+# classifier-threshold, is-classifier
+#
+# Optional properties for classifiers:
+# classifier-async-mode(Secondary mode only, Default=false)
+#
+# Optional properties in secondary mode:
+# operate-on-gie-id(Default=0), operate-on-class-ids(Defaults to all classes),
+# input-object-min-width, input-object-min-height, input-object-max-width,
+# input-object-max-height
+#
+# Following properties are always recommended:
+# batch-size(Default=1)
+#
+# Other optional properties:
+# net-scale-factor(Default=1), network-mode(Default=0 i.e FP32),
+# model-color-format(Default=0 i.e. RGB) model-engine-file, labelfile-path,
+# mean-file, gie-unique-id(Default=0), offsets, process-mode (Default=1 i.e. primary),
+# custom-lib-path
+#
+# The values in the config file are overridden by values set through GObject
+# properties.
+
+[property]
+gpu-id=0
+net-scale-factor=1.0
+model-color-format=0
+uff-file=../../../../samples/models/Segmentation/semantic/unetres18_v4_pruned0.65_800_data.uff
+model-engine-file=../../../../samples/models/Segmentation/semantic/unetres18_v4_pruned0.65_800_data.uff_b1_gpu0_fp32.engine
+infer-dims=3;512;512
+uff-input-order=0
+uff-input-blob-name=data
+batch-size=2
+## 0=FP32, 1=INT8, 2=FP16 mode
+network-mode=0
+num-detected-classes=4
+interval=0
+gie-unique-id=1
+network-type=2
+output-blob-names=final_conv/BiasAdd
+segmentation-threshold=0.0
+#parse-bbox-func-name=NvDsInferParseCustomSSD
+#custom-lib-path=nvdsinfer_custom_impl_ssd/libnvdsinfer_custom_impl_ssd.so
+#scaling-filter=0
+#scaling-compute-hw=0
+
+[class-attrs-all]
+roi-top-offset=0
+roi-bottom-offset=0
+detected-min-w=0
+detected-min-h=0
+detected-max-w=0
+detected-max-h=0
+
+## Per class configuration
+#[class-attrs-2]
+#threshold=0.6
+#roi-top-offset=20
+#roi-bottom-offset=10
+#detected-min-w=40
+#detected-min-h=40
+#detected-max-w=400
+#detected-max-h=800
diff --git a/apps/deepstream-ssd-parser/README b/apps/deepstream-ssd-parser/README
index e5d54c2..6555a66 100644
--- a/apps/deepstream-ssd-parser/README
+++ b/apps/deepstream-ssd-parser/README
@@ -1,5 +1,5 @@
################################################################################
-# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2020-2021, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
@@ -21,7 +21,7 @@
################################################################################
Prequisites:
-- DeepStreamSDK 5.0
+- DeepStreamSDK 5.1
- NVIDIA Triton Inference Server
- Python 3.6
- Gst-python
diff --git a/apps/deepstream-ssd-parser/custom_parser_guide.md b/apps/deepstream-ssd-parser/custom_parser_guide.md
index 56061f8..ea435e3 100644
--- a/apps/deepstream-ssd-parser/custom_parser_guide.md
+++ b/apps/deepstream-ssd-parser/custom_parser_guide.md
@@ -83,15 +83,15 @@ Sample code is available for SSD parser neural network.
**3 - Tensor meta structure and bindings API**
-* pyds.NvDsInferTensorMeta.cast(data): This function casts the data into the pyds.NvDsInferTensorMeta object, This metadata is added as NvDsUserMeta to the frame_user_meta_list of the corresponding frame_meta or object_user_meta_list of the corresponding object with the meta_type set to NVDSINFER_TENSOR_OUTPUT_META.
+* pyds.NvDsInferTensorMeta.cast(data): This function casts the data into the pyds.NvDsInferTensorMeta object. This metadata is added as NvDsUserMeta to the frame_user_meta_list of the corresponding frame_meta or object_user_meta_list of the corresponding object with the meta_type set to NVDSINFER_TENSOR_OUTPUT_META.
-This object has the following methods and members
+This object has the following methods and members:
* gpu_id: GPU device ID on which the device buffers have been allocated.
-* num_output_layers: num_output_layers
+* num_output_layers: Number of output layers.
-* out_buf_ptrs_dev: Array of objects to the output device buffers for the frame / object..
+* out_buf_ptrs_dev: Array of objects to the output device buffers for the frame / object.
* out_buf_ptrs_host: Array of objects to the output host buffers for the frame / object.
@@ -105,23 +105,23 @@ This object has the following methods and members
* priv_data: Private data used for the meta producer’s internal memory management.
-* unique_id: Unique ID of the gst-nvinfer instance which attached this meta
+* unique_id: Unique ID of the gst-nvinfer instance which attached this meta.
* pyds.NvDsInferObjectDetectionInfo: Holds information about one parsed object from detector’s output.
- This object has the following methods and members
+ This object has the following methods and members:
* classId: ID of the class to which the object belongs.
* detectionConfidence: Object detection confidence. Should be a float value in the range [0,1].
-* height: Height of the bounding box shape for the object
+* height: Height of the bounding box shape for the object.
* left: Horizontal offset of the bounding box shape for the object.
* top: Vertical offset of the bounding box shape for the object.
-* width: Width of the bounding box shape for the object
+* width: Width of the bounding box shape for the object.
** **
@@ -133,53 +133,53 @@ Below is a general explanation of the deepstream-ssd-parser sample application.
**4.1.1 - main**
-* This function takes a path to a file media or uri
+* This function takes a path to a file media or uri.
-* Gstreamer initialization is performed
+* Gstreamer initialization is performed.
-* Several elements a created in order to make a pipeline
+* Several elements are created in order to make a pipeline.
* These elements are added to the pipeline and linked together.
-* Probe functions are linked to the pipeline in order to interact with the data
+* Probe functions are linked to the pipeline in order to interact with the data:
* **pgie_src_pad_buffer_probe**
* **osd_sink_pad_buffer_probe**
-* The pipeline is set to its PLAYING mode
+* The pipeline is set to its PLAYING mode.
-* The main loop is run
+* The main loop is run.
-* The pipeline is set to its NULL mode
+* The pipeline is set to its NULL mode.
**4.1.2 - pgie_src_pad_buffer_probe**
-* Initialize an empty dictionary with the class id as key and the number of occurrences as value
+* Initialize an empty dictionary with the class id as key and the number of occurrences as value.
-* For each element in the image, the number of occurrences of the element class is incremented
+* For each element in the image, the number of occurrences of the element class is incremented.
* A string containing the number of cars and persons is formatted.
-* Some display style is added to this string before displaying
+* Some display style is added to this string before displaying.
-* The string is added on the frame
+* The string is added on the frame.
**4.1.3 - osd_sink_pad_buffer_probe**
-* The gst buffer is retrieved from the info argument
+* The gst buffer is retrieved from the info argument.
-* The batch meta is retrieved from the gst buffer
+* The batch meta is retrieved from the gst buffer.
-* A detection param is created. It contains the number of class and their accuracy threshold
+* A detection param is created. It contains the number of classes and their accuracy threshold.
* A box size param is created, it contains the image dimension and the minimum dimension of a bounding box. This will be used to remove bounding boxes that are too small. This neural network works only with floating-point coordinates between 0 and 1. Since the minimum dimension is given in pixels, the image dimension is needed to scale up the floating-point coordinates. And then compare them to the minimum dimension.
* A nms (Non-maximal suppression) param is created. This param contains:
- top_k: the maximum number of boxes to keep
+ top_k: the maximum number of boxes to keep.
- iou_threshold: intersection over union threshold used to discard bounding boxes that are too similar
+ iou_threshold: intersection over union threshold used to discard bounding boxes that are too similar.
* The labels are retrieved from a file in the same directory called labels.txt, this path can be specified in the config file. The labels must be one per line, and the ordering corresponds to class ID ordering.
@@ -187,7 +187,7 @@ Below is a general explanation of the deepstream-ssd-parser sample application.
* tensor meta is then used to retrieve layer information and store them in a list (layers_info).
-* Then a frame_object_list is obtained by calling **nvds_infer_parse_custom_tf_ssd** with
+* Then a frame_object_list is obtained by calling **nvds_infer_parse_custom_tf_ssd** with:
* layers_info
@@ -201,17 +201,17 @@ Below is a general explanation of the deepstream-ssd-parser sample application.
**4.1.4 - add_obj_meta_to_frame**
-* An obj_meta is created and filled with
+* An obj_meta is created and filled with:
* dimension and position information.
- * detection confidence
+ * detection confidence.
- * label name
+ * label name.
- * class id
+ * class id.
- * text to be displayed on the box around the object
+ * text to be displayed on the box around the object.
* This obj_meta is added to the frame.
@@ -247,7 +247,7 @@ Below is a general explanation of the deepstream-ssd-parser sample application.
* Each list in per_class_object_list, is sorted by confidence.
-* Then non_maximum_suppression is applied to keep only selected indices
+* Then non_maximum_suppression is applied to keep only selected indices.
* The objects related to these indices are stored in clustered_b_boxes.
diff --git a/apps/deepstream-ssd-parser/dstest_ssd_nopostprocess.txt b/apps/deepstream-ssd-parser/dstest_ssd_nopostprocess.txt
index 16d057b..df47248 100644
--- a/apps/deepstream-ssd-parser/dstest_ssd_nopostprocess.txt
+++ b/apps/deepstream-ssd-parser/dstest_ssd_nopostprocess.txt
@@ -35,7 +35,7 @@ infer_config {
}
custom_lib {
- path: "/opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_infercustomparser.so"
+ path: "/opt/nvidia/deepstream/deepstream/lib/libnvds_infercustomparser.so"
}
}
input_control {
diff --git a/apps/deepstream-test1-rtsp-out/README b/apps/deepstream-test1-rtsp-out/README
index b016803..0bdc599 100644
--- a/apps/deepstream-test1-rtsp-out/README
+++ b/apps/deepstream-test1-rtsp-out/README
@@ -1,5 +1,5 @@
################################################################################
-# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2020-2021, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
@@ -21,7 +21,7 @@
################################################################################
Prequisites:
-- DeepStreamSDK 5.0
+- DeepStreamSDK 5.1
- Python 3.6
- Gst-python
- GstRtspServer
@@ -70,5 +70,5 @@ nvinfer element attach some MetaData to the buffer. By attaching
the probe function at the end of the pipeline, one can extract meaningful
information from this inference. Please refer the "osd_sink_pad_buffer_probe"
function in the sample code. For details on the Metadata format, refer to the
-file "gstnvdsmeta.h"
+file "gstnvdsmeta.h".
diff --git a/apps/deepstream-test1-usbcam/README b/apps/deepstream-test1-usbcam/README
index a76adc9..9c3c315 100644
--- a/apps/deepstream-test1-usbcam/README
+++ b/apps/deepstream-test1-usbcam/README
@@ -1,5 +1,5 @@
################################################################################
-# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2020-2021, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
@@ -21,7 +21,7 @@
################################################################################
Prequisites:
-- DeepStreamSDK 5.0
+- DeepStreamSDK 5.1
- Python 3.6
- Gst-python
@@ -52,5 +52,5 @@ nvinfer element attach some MetaData to the buffer. By attaching
the probe function at the end of the pipeline, one can extract meaningful
information from this inference. Please refer the "osd_sink_pad_buffer_probe"
function in the sample code. For details on the Metadata format, refer to the
-file "gstnvdsmeta.h"
+file "gstnvdsmeta.h".
diff --git a/apps/deepstream-test1/README b/apps/deepstream-test1/README
index c7082f3..7d2d54c 100644
--- a/apps/deepstream-test1/README
+++ b/apps/deepstream-test1/README
@@ -1,5 +1,5 @@
################################################################################
-# Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2019-2021, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
@@ -21,7 +21,7 @@
################################################################################
Prequisites:
-- DeepStreamSDK 5.0
+- DeepStreamSDK 5.1
- Python 3.6
- Gst-python
diff --git a/apps/deepstream-test2/README b/apps/deepstream-test2/README
index 9ef7284..aeadf60 100644
--- a/apps/deepstream-test2/README
+++ b/apps/deepstream-test2/README
@@ -1,5 +1,5 @@
################################################################################
-# Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2019-2021, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
@@ -21,12 +21,14 @@
################################################################################
Prequisites:
-- DeepStreamSDK 5.0
+- DeepStreamSDK 5.1
- Python 3.6
- Gst-python
To run the test app:
- $ python3 deepstream_test_2.py
+ $ python3 deepstream_test_2.py <h264_elementary_stream> [0/1]
+
+To get the past-frame tracking meta, use 1; otherwise use 0. This argument is optional.
This document shall describe about the sample deepstream-test2 application.
@@ -51,6 +53,13 @@ For reference, here are the config files used for this sample :
5. The tracker (referred to as nvtracker in this sample) uses
dstest2_tracker_config.txt
+To get the past-frame-tracking meta, the following changes have to be added to
+the dstest2_tracker_config.txt.
+
+1. ll-lib-file=/opt/nvidia/deepstream/deepstream/lib/libnvds_nvdcf.so
+2. ll-config-file=tracker_config.yml
+3. enable-past-frame=1
+
In this sample, we first create one instance of "nvinfer", referred as the pgie.
This is our 4 class detector and it detects for "Vehicle , RoadSign, TwoWheeler,
Person". After this we link a "nvtracker" instance which tracks the objects
diff --git a/apps/deepstream-test2/deepstream_test_2.py b/apps/deepstream-test2/deepstream_test_2.py
index df9b806..cb81440 100644
--- a/apps/deepstream-test2/deepstream_test_2.py
+++ b/apps/deepstream-test2/deepstream_test_2.py
@@ -39,6 +39,7 @@
PGIE_CLASS_ID_BICYCLE = 1
PGIE_CLASS_ID_PERSON = 2
PGIE_CLASS_ID_ROADSIGN = 3
+past_tracking_meta=[0]
def osd_sink_pad_buffer_probe(pad,info,u_data):
frame_number=0
@@ -120,15 +121,60 @@ def osd_sink_pad_buffer_probe(pad,info,u_data):
l_frame=l_frame.next
except StopIteration:
break
+        #past tracking meta data
+ if(past_tracking_meta[0]==1):
+ l_user=batch_meta.batch_user_meta_list
+ while l_user is not None:
+ try:
+ # Note that l_user.data needs a cast to pyds.NvDsUserMeta
+ # The casting is done by pyds.NvDsUserMeta.cast()
+ # The casting also keeps ownership of the underlying memory
+ # in the C code, so the Python garbage collector will leave
+ # it alone
+ user_meta=pyds.NvDsUserMeta.cast(l_user.data)
+ except StopIteration:
+ break
+ if(user_meta and user_meta.base_meta.meta_type==pyds.NvDsMetaType.NVDS_TRACKER_PAST_FRAME_META):
+ try:
+ # Note that user_meta.user_meta_data needs a cast to pyds.NvDsPastFrameObjBatch
+ # The casting is done by pyds.NvDsPastFrameObjBatch.cast()
+ # The casting also keeps ownership of the underlying memory
+ # in the C code, so the Python garbage collector will leave
+ # it alone
+ pPastFrameObjBatch = pyds.NvDsPastFrameObjBatch.cast(user_meta.user_meta_data)
+ except StopIteration:
+ break
+ for trackobj in pyds.NvDsPastFrameObjBatch.list(pPastFrameObjBatch):
+ print("streamId=",trackobj.streamID)
+ print("surfaceStreamID=",trackobj.surfaceStreamID)
+ for pastframeobj in pyds.NvDsPastFrameObjStream.list(trackobj):
+ print("numobj=",pastframeobj.numObj)
+ print("uniqueId=",pastframeobj.uniqueId)
+ print("classId=",pastframeobj.classId)
+ print("objLabel=",pastframeobj.objLabel)
+ for objlist in pyds.NvDsPastFrameObjList.list(pastframeobj):
+ print('frameNum:', objlist.frameNum)
+ print('tBbox.left:', objlist.tBbox.left)
+ print('tBbox.width:', objlist.tBbox.width)
+ print('tBbox.top:', objlist.tBbox.top)
+                            print('tBbox.height:', objlist.tBbox.height)
+ print('confidence:', objlist.confidence)
+ print('age:', objlist.age)
+ try:
+ l_user=l_user.next
+ except StopIteration:
+ break
return Gst.PadProbeReturn.OK
def main(args):
# Check input arguments
- if len(args) != 2:
- sys.stderr.write("usage: %s \n" % args[0])
+ if(len(args)<2):
+        sys.stderr.write("usage: %s <h264_elementary_stream> [0/1]\n" % args[0])
sys.exit(1)
# Standard GStreamer initialization
+ if(len(args)==3):
+ past_tracking_meta[0]=int(args[2])
GObject.threads_init()
Gst.init(None)
@@ -243,6 +289,9 @@ def main(args):
if key == 'enable-batch-process' :
tracker_enable_batch_process = config.getint('tracker', key)
tracker.set_property('enable_batch_process', tracker_enable_batch_process)
+ if key == 'enable-past-frame' :
+ tracker_enable_past_frame = config.getint('tracker', key)
+ tracker.set_property('enable_past_frame', tracker_enable_past_frame)
print("Adding elements to Pipeline \n")
pipeline.add(source)
diff --git a/apps/deepstream-test2/dstest2_tracker_config.txt b/apps/deepstream-test2/dstest2_tracker_config.txt
index a152649..c1508f5 100644
--- a/apps/deepstream-test2/dstest2_tracker_config.txt
+++ b/apps/deepstream-test2/dstest2_tracker_config.txt
@@ -31,7 +31,8 @@
tracker-width=640
tracker-height=384
gpu-id=0
-ll-lib-file=/opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_mot_klt.so
-#ll-lib-file=/opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_nvdcf.so
-#ll-config-file=tracker_config.yml
+ll-lib-file=/opt/nvidia/deepstream/deepstream/lib/libnvds_mot_klt.so
+#ll-lib-file=/opt/nvidia/deepstream/deepstream/lib/libnvds_nvdcf.so
+ll-config-file=tracker_config.yml
+#enable-past-frame=1
enable-batch-process=1
diff --git a/apps/deepstream-test2/tracker_config.yml b/apps/deepstream-test2/tracker_config.yml
index d34b49f..6af8f06 100644
--- a/apps/deepstream-test2/tracker_config.yml
+++ b/apps/deepstream-test2/tracker_config.yml
@@ -1,45 +1,96 @@
%YAML:1.0
-
+################################################################################
+# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+################################################################################
+
NvDCF:
- useColorNames: 0 # use ColorNames feature
- useHog: 1 # use Histogram-of-Oriented-Gradient (HOG) feature
+ # [General]
+    useUniqueID: 1    # Use 64-bit long Unique ID when assigning tracker ID. Default is [true]
+ maxTargetsPerStream: 99 # Max number of targets to track per stream. Recommended to set >10. Note: this value should account for the targets being tracked in shadow mode as well. Max value depends on the GPU memory capacity
+
+ # [Feature Extraction]
+ useColorNames: 1 # Use ColorNames feature
+ useHog: 0 # Use Histogram-of-Oriented-Gradient (HOG) feature
useHighPrecisionFeature: 0 # Use high-precision in feature extraction. Default is [true]
- useUniqueID: 0 # Use 64-bit long Unique ID when assignining tracker ID. Default is [false]
- maxTargetsPerStream: 99 # Max number of targets to track per stream. Recommended to set >10. Note: this value should account for the targets being tracked in shadow mode as well. Max value depends on the GPU memory capacity
+ # [DCF]
+ filterLr: 0.15 # learning rate for DCF filter in exponential moving average. Valid Range: [0.0, 1.0]
+ filterChannelWeightsLr: 0.22 # learning rate for the channel weights among feature channels. Valid Range: [0.0, 1.0]
+ gaussianSigma: 0.75 # Standard deviation for Gaussian for desired response when creating DCF filter [pixels]
+ featureImgSizeLevel: 3 # Size of a feature image. Valid range: {1, 2, 3, 4, 5}, from the smallest to the largest
+ SearchRegionPaddingScale: 1 # Search region size. Determines how large the search region should be scaled from the target bbox. Valid range: {1, 2, 3}, from the smallest to the largest
- filterLr: 0.075 #11 #11 #0.175 #0.11 # learning rate for DCF filter in exponential moving average. Valid Range: [0.0, 1.0]
- gaussianSigma: 0.75 #0.75 #0.75 #0.75 # Standard deviation for Gaussian for desired response when creating DCF filter
+ # [MOT] [False Alarm Handling]
+ maxShadowTrackingAge: 30 # Max length of shadow tracking (the shadow tracking age is incremented when (1) there's detector input yet no match or (2) tracker confidence is lower than minTrackerConfidence). Once reached, the tracker will be terminated.
+ probationAge: 3 # Once the tracker age (incremented at every frame) reaches this, the tracker is considered to be valid
+ earlyTerminationAge: 1 # Early termination age (in terms of shadow tracking age) during the probation period. If reached during the probation period, the tracker will be terminated prematurely.
- minDetectorConfidence: 0.0 # If the confidence of a detector bbox is lower than this, then it won't be considered for tracking
- minTrackerConfidence: 0.7 # If the confidence of an object tracker is lower than this on the fly, then it will be tracked in shadow mode. Valid Range: [0.0, 1.0]
- minTargetBboxSize: 5 # If the width or height of the bbox size gets smaller than this threshold, the target will be terminated
+ # [Tracker Creation Policy] [Target Candidacy]
+ minDetectorConfidence: -1 # If the confidence of a detector bbox is lower than this, then it won't be considered for tracking
+ minTrackerConfidence: 0.7 # If the confidence of an object tracker is lower than this on the fly, then it will be tracked in shadow mode. Valid Range: [0.0, 1.0]
+ minTargetBboxSize: 10 # If the width or height of the bbox size gets smaller than this threshold, the target will be terminated.
+ minDetectorBboxVisibilityTobeTracked: 0.0 # If the detector-provided bbox's visibility (i.e., IOU with image) is lower than this, it won't be considered.
+ minVisibiilty4Tracking: 0.0 # If the visibility of the tracked object (i.e., IOU with image) is lower than this, it will be terminated immediately, assuming it is going out of scene.
- featureImgSizeLevel: 2 # Size of a feature image. Valid range: {1, 2, 3, 4, 5}, from the smallest to the largest
- SearchRegionPaddingScale: 1 # Search region size. Determines how large the search region should be scaled from the target bbox. Valid range: {1, 2, 3}, from the smallest to the largest
+ # [Tracker Termination Policy]
+ targetDuplicateRunInterval: 5 # The interval in which the duplicate target detection removal is carried out. A Negative value indicates indefinite interval. Unit: [frames]
+ minIou4TargetDuplicate: 0.9 # If the IOU of two target bboxes are higher than this, the newer target tracker will be terminated.
- maxShadowTrackingAge: 30 # Max length of shadow tracking (the shadow tracking age is incremented when (1) there's detector input yet no match or (2) tracker confidence is lower than minTrackerConfidence). Once reached, the tracker will be terminated.
- probationAge: 3 # Once the tracker age (incremented at every frame) reaches this, the tracker is considered to be valid
- earlyTerminationAge: 1 # Early termination age (in terms of shadow tracking age) during the probation period
+ # [Data Association] Matching method
+    useGlobalMatching: 0   # If true, enable a global matching algorithm (i.e., Hungarian method). Otherwise, a greedy algorithm wll be used.
- # thresholds for data association
- minMatchingScore4Overall: 0.0
- minMatchingScore4Motion: 0.5
- minMatchingScore4Iou: 0.1
- minMatchingScore4VisualSimilarity: 0.2
- minTrackingConfidenceDuringInactive: 0.9
+ # [Data Association] Thresholds in matching scores to be considered as a valid candidate for matching
+ minMatchingScore4Overall: 0.0 # Min total score
+ minMatchingScore4SizeSimilarity: 0.5 # Min bbox size similarity score
+ minMatchingScore4Iou: 0.1 # Min IOU score
+ minMatchingScore4VisualSimilarity: 0.2 # Min visual similarity score
+ minTrackingConfidenceDuringInactive: 1.0 # Min tracking confidence during INACTIVE period. If tracking confidence is higher than this, then tracker will still output results until next detection
+ # [Data Association] Weights for each matching score term
matchingScoreWeight4VisualSimilarity: 0.8 # Weight for the visual similarity (in terms of correlation response ratio)
- matchingScoreWeight4Motion: 0.0 # Weight for the Size-similarity score
+ matchingScoreWeight4SizeSimilarity: 0.0 # Weight for the Size-similarity score
matchingScoreWeight4Iou: 0.1 # Weight for the IOU score
matchingScoreWeight4Age: 0.1 # Weight for the tracker age
- minDetectorBboxVisibilityTobeTracked: 0.0
- minVisibiilty4Tracking: 0.0
-
- bboxPaddingScaleForAssociation: 0.0 # Padding scale for bboxes when computing IOU for data association
- visibilityRoiFactor: 0.00 # Define the ROI of image where tracking and detection is considered to be valid. If visibilityRoiFactor = 0.05, it would shrink the ROI by 5% from the image boundary
+ # [State Estimator]
+ useTrackSmoothing: 1 # Use a state estimator
+ stateEstimatorType: 1 # The type of state estimator among { moving_avg:1, kalman_filter:2 }
+ # [State Estimator] [MovingAvgEstimator]
trackExponentialSmoothingLr_loc: 0.5 # Learning rate for new location
trackExponentialSmoothingLr_scale: 0.3 # Learning rate for new scale
trackExponentialSmoothingLr_velocity: 0.05 # Learning rate for new velocity
+
+ # [State Estimator] [Kalman Filter]
+ kfProcessNoiseVar4Loc: 0.1 # Process noise variance for location in Kalman filter
+ kfProcessNoiseVar4Scale: 0.04 # Process noise variance for scale in Kalman filter
+ kfProcessNoiseVar4Vel: 0.04 # Process noise variance for velocity in Kalman filter
+ kfMeasurementNoiseVar4Trk: 9 # Measurement noise variance for tracker's detection in Kalman filter
+ kfMeasurementNoiseVar4Det: 9 # Measurement noise variance for detector's detection in Kalman filter
+
+ # [Past-frame Data]
+ useBufferedOutput: 0 # Enable storing of past-frame data in a buffer and report it back
+
+ # [Instance-awareness]
+ useInstanceAwareness: 0 # Use instance-awareness for multi-object tracking
+    lambda_ia: 2         # Regularization factor for each instance
+ maxInstanceNum_ia: 4 # The number of nearby object instances to use for instance-awareness
+
diff --git a/apps/deepstream-test3/README b/apps/deepstream-test3/README
index 2cc5bd1..da34b31 100644
--- a/apps/deepstream-test3/README
+++ b/apps/deepstream-test3/README
@@ -1,5 +1,5 @@
################################################################################
-# Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2019-2021, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
@@ -21,7 +21,7 @@
################################################################################
Prerequisites:
-- DeepStreamSDK 5.0
+- DeepStreamSDK 5.1
- Python 3.6
- Gst-python
diff --git a/apps/deepstream-test3/deepstream_test_3.py b/apps/deepstream-test3/deepstream_test_3.py
index 4b4fddb..81354a4 100644
--- a/apps/deepstream-test3/deepstream_test_3.py
+++ b/apps/deepstream-test3/deepstream_test_3.py
@@ -170,10 +170,7 @@ def cb_newpad(decodebin, decoder_src_pad,data):
def decodebin_child_added(child_proxy,Object,name,user_data):
print("Decodebin child added:", name, "\n")
if(name.find("decodebin") != -1):
- Object.connect("child-added",decodebin_child_added,user_data)
- if(is_aarch64() and name.find("nvv4l2decoder") != -1):
- print("Seting bufapi_version\n")
- Object.set_property("bufapi-version",True)
+ Object.connect("child-added",decodebin_child_added,user_data)
def create_source_bin(index,uri):
print("Creating source bin")
diff --git a/apps/deepstream-test4/README b/apps/deepstream-test4/README
index 95b545f..48dcaa1 100644
--- a/apps/deepstream-test4/README
+++ b/apps/deepstream-test4/README
@@ -1,5 +1,5 @@
################################################################################
-# Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2019-2021, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
@@ -21,7 +21,7 @@
################################################################################
Prerequisites:
-- DeepStreamSDK 5.0
+- DeepStreamSDK 5.1
- Python 3.6
- Gst-python
diff --git a/apps/deepstream-test4/deepstream_test_4.py b/apps/deepstream-test4/deepstream_test_4.py
index 1bc1550..02e1954 100644
--- a/apps/deepstream-test4/deepstream_test_4.py
+++ b/apps/deepstream-test4/deepstream_test_4.py
@@ -303,8 +303,8 @@ def osd_sink_pad_buffer_probe(pad,info,u_data):
# Setting callbacks in the event msg meta. The bindings layer
# will wrap these callables in C functions. Currently only one
# set of callbacks is supported.
- pyds.set_user_copyfunc(user_event_meta, meta_copy_func)
- pyds.set_user_releasefunc(user_event_meta, meta_free_func)
+ pyds.user_copyfunc(user_event_meta, meta_copy_func)
+ pyds.user_releasefunc(user_event_meta, meta_free_func)
pyds.nvds_add_user_meta_to_frame(frame_meta, user_event_meta)
else:
print("Error in attaching event meta to buffer\n")
@@ -328,6 +328,10 @@ def main(args):
GObject.threads_init()
Gst.init(None)
+ #registering callbacks
+ pyds.register_user_copyfunc(meta_copy_func)
+ pyds.register_user_releasefunc(meta_free_func)
+
print("Creating Pipeline \n ")
pipeline = Gst.Pipeline()