diff --git a/cmsis-pack-examples/README.md b/cmsis-pack-examples/README.md index 7fda3f5..3939840 100644 --- a/cmsis-pack-examples/README.md +++ b/cmsis-pack-examples/README.md @@ -14,11 +14,13 @@ - [Prerequisites](#prerequisites) - [Visual Studio Code](#visual-studio-code) - [Packs](#packs) + - [Virtual Streaming Interface](#virtual-streaming-interface) - [Building the examples](#building-the-examples) - [Launch project in Visual Studio Code](#launch-project-in-visual-studio-code) - [Download Software Packs](#download-software-packs) - [Generate and build the project](#generate-and-build-the-project) - - [Execute project](#execute-project) + - [Execute Project](#execute-project) + - [Working with Virtual Streaming Interface](#working-with-virtual-streaming-interface) - [Application output](#application-output) - [Trademarks](#trademarks) - [Licenses](#licenses) @@ -107,6 +109,33 @@ CMSIS-Pack defines a standardized way to deliver software components, device par support information and code. A list of available CMSIS-Packs can be found [here](https://developer.arm.com/tools-and-software/embedded/cmsis/cmsis-packs). +## Virtual Streaming Interface + +[Virtual Streaming Interface](https://arm-software.github.io/AVH/main/simulation/html/group__arm__vsi.html) +(VSI) is available for certain +[Fixed Virtual Platform](https://developer.arm.com/Tools%20and%20Software/Fixed%20Virtual%20Platforms) (FVP) or +[Arm Virtual Hardware](https://developer.arm.com/Tools%20and%20Software/Arm%20Virtual%20Hardware) (AVH) +targets. For VSI supported examples, you may need to install some dependencies. 
+ +For more details and up-to-date requirements, see +[Python environment setup](https://arm-software.github.io/AVH/main/simulation/html/group__arm__vsi__pyenv.html) +which mentions: + +> The following packages are required on Linux systems (Ubuntu 20.04 and later): +> - libatomic1 +> - python3.9 +> - python3-pip + +In addition to the above, the VSI Python scripts depend on `opencv-python` package. We recommend using +a virtual environment and installing this with pip. + +```shell +$ pip install opencv-python +``` + +**NOTE**: The requirement for Python version is driven by the FVP executable. Versions <= 11.26 require +Python3.9 but this may change for future releases. + # Building the examples ## Launch project in Visual Studio Code @@ -181,12 +210,12 @@ Build complete The built artifacts will be located under the `out/` directory in the project root. -## Execute project +## Execute Project The project is configured for execution on Arm Virtual Hardware which removes the requirement for a physical hardware board. -- When using a Fixed Virtual Platform installed locally: +- When using a Fixed Virtual Platform (FVP) installed locally: ```shell $ -a ./out/kws/AVH-SSE-300-U55/Debug/kws.Debug+AVH-SSE-300-U55.axf -f ./FVP/FVP_Corstone_SSE-300/fvp_config.txt ``` @@ -218,6 +247,21 @@ For example: $ cp ./out/kws/STM32F746-DISCO/Release/kws.Release+STM32F746-DISCO.bin /media/user/DIS_F746NG/ && sync ``` +### Working with Virtual Streaming Interface + +The object detection example for Arm Corstone-300 and Corstone-310 supports Virtual Streaming Interface (VSI). +This allows the locally installed FVP application (or an AVH instance) to read images in from a camera connected to +your local machine and stream these over to the application running within the FVP. + +To run the VSI application, append the command line with the v_path argument. 
For example: + +```shell + $ \ + -a ./out/object-detection-vsi/AVH-SSE-300-U55/Release/object-detection-vsi.axf \ + -C ethosu.num_macs=256 \ + -C mps3_board.v_path=./device/corstone/vsi/video/python/ + ``` + ## Application output Once the project can be built successfully, the execution on target hardware will show output of @@ -378,4 +422,21 @@ spot immediately. Please help us improve this section by reporting them via GitH Currently Keil Studio Cloud only supports running with the Arm® Ethos™-U55 on AVH virtual targets. You can build the project but will have to run it on your local machine on an - installation of the equivalent Fixed Virtual Platform containing Arm® Ethos™-U65 NPU. \ No newline at end of file + installation of the equivalent Fixed Virtual Platform containing Arm® Ethos™-U65 NPU. + +7. The newer versions of BSP packs for Arm® Corstone™-300 and Arm® Corstone™-310 require CMSIS 6. + There are warnings about some unsatisfied requirements because of this. For example: + ```shell + MISSING ARM::Device:Definition@2.0.0 + require CMSIS:CORE@6.0.0 + MISSING ARM::Device:Native Driver:SysCounter@1.1.0 + require CMSIS:CORE@6.0.0 + MISSING ARM::Device:Native Driver:SysTimer@1.1.0 + require CMSIS:CORE@6.0.0 + MISSING ARM::Device:Native Driver:Timeout@1.0.0 + require CMSIS:CORE@6.0.0 + MISSING ARM::Device:Startup&C Startup@2.0.0 + require CMSIS:CORE@6.0.0 + ``` + These are expected to be resolved once we bump up the version of CMSIS core pack. Currently, + this is blocked by other dependencies. 
diff --git a/cmsis-pack-examples/device/alif-ensemble/alif-ensemble-E7-device.clayer.yml b/cmsis-pack-examples/device/alif-ensemble/alif-ensemble-E7-device.clayer.yml index fbb6a84..89da158 100644 --- a/cmsis-pack-examples/device/alif-ensemble/alif-ensemble-E7-device.clayer.yml +++ b/cmsis-pack-examples/device/alif-ensemble/alif-ensemble-E7-device.clayer.yml @@ -67,7 +67,6 @@ layer: - component: AlifSemiconductor::Device:SOC Peripherals:PINCONF - component: AlifSemiconductor::Device:SOC Peripherals:MHU - component: AlifSemiconductor::Device:Startup - - component: ARM::CMSIS Driver:USART - component: ARM::CMSIS Driver:USART:Custom - component: AlifSemiconductor::Device:SOC Peripherals:DMA diff --git a/cmsis-pack-examples/device/corstone/src/BoardInit.cpp b/cmsis-pack-examples/device/corstone/src/BoardInit.cpp index 5022383..0923119 100644 --- a/cmsis-pack-examples/device/corstone/src/BoardInit.cpp +++ b/cmsis-pack-examples/device/corstone/src/BoardInit.cpp @@ -66,7 +66,11 @@ static void arm_ethosu_npu_irq_handler(void) /** @brief Initialises the NPU IRQ */ static void arm_ethosu_npu_irq_init(void) { - const IRQn_Type ethosu_irqnum = (IRQn_Type)ETHOS_U55_IRQn; + #if defined(CORSTONE310_FVP) + const IRQn_Type ethosu_irqnum = (IRQn_Type)NPU0_IRQn; + #else + const IRQn_Type ethosu_irqnum = (IRQn_Type)ETHOS_U55_IRQn; + #endif /* Register the EthosU IRQ handler in our vector table. 
* Note, this handler comes from the EthosU driver */ @@ -87,7 +91,11 @@ static int arm_ethosu_npu_init(void) arm_ethosu_npu_irq_init(); /* Initialise Ethos-U device */ - void* const ethosu_base_address = (void*)(ETHOS_U55_APB_BASE_S); + #if defined(CORSTONE310_FVP) + void* const ethosu_base_address = (void*)(NPU0_APB_BASE_NS); + #else + void* const ethosu_base_address = (void*)(ETHOS_U55_APB_BASE_S); + #endif debug("Cache arena: 0x%p\n", get_cache_arena()); diff --git a/cmsis-pack-examples/device/corstone/vsi/include/arm_vsi.h b/cmsis-pack-examples/device/corstone/vsi/include/arm_vsi.h new file mode 100644 index 0000000..2294aad --- /dev/null +++ b/cmsis-pack-examples/device/corstone/vsi/include/arm_vsi.h @@ -0,0 +1,112 @@ +/* + * Copyright (c) 2021-2022 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * Virtual Streaming Interface (VSI) + */ + +#ifndef __ARM_VSI_H +#define __ARM_VSI_H + +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef __IM +#define __IM volatile const /*! Defines 'read only' structure member permissions */ +#endif +#ifndef __OM +#define __OM volatile /*! Defines 'write only' structure member permissions */ +#endif +#ifndef __IOM +#define __IOM volatile /*! 
Defines 'read/write' structure member permissions */ +#endif + +#include + +/// Structure type to access the virtual streaming interface +typedef struct +{ + /// Interrupt Request (IRQ) + struct { + __IOM uint32_t Enable; /*!< (R/W) IRQ Enable */ + __OM uint32_t Set; /*!< (-/W) IRQ Set */ + __OM uint32_t Clear; /*!< (-/W) IRQ Clear */ + __IM uint32_t Status; /*!< (R/-) IRQ Status */ + } IRQ; + uint32_t reserved1[60]; + /// Time counter with 1MHz input frequency + struct { + __IOM uint32_t Control; /*!< (R/W) Timer Control */ + __IOM uint32_t Interval; /*!< (R/W) Timer Interval Value (in microseconds) */ + __IM uint32_t Count; /*!< (R/-) Timer Overflow Count */ + } Timer; + uint32_t reserved2[61]; + /// Direct Memory Access (DMA) Controller + struct { + __IOM uint32_t Control; /*!< (R/W) DMA Control */ + __IOM uint32_t Address; /*!< (R/W) DMA Memory Start Address */ + __IOM uint32_t BlockSize; /*!< (R/W) DMA Block Size (in bytes, multiple of 4) */ + __IOM uint32_t BlockNum; /*!< (R/W) DMA Number of Blocks (must be 2^n) */ + __IM uint32_t BlockIndex; /*!< (R/-) DMA Block Index */ + } DMA; + uint32_t reserved3[59]; + __IOM uint32_t Regs[64]; /*!< (R/W) User Registers */ +} ARM_VSI_Type; + +/* VSI Timer Control Definitions for Timer.Control register */ +#define ARM_VSI_Timer_Run_Pos 0U /*!< Timer Control: Run Position */ +#define ARM_VSI_Timer_Run_Msk (1UL << ARM_VSI_Timer_Run_Pos) /*!< Timer Control: Run Mask */ +#define ARM_VSI_Timer_Periodic_Pos 1U /*!< Timer Control: Periodic Position */ +#define ARM_VSI_Timer_Periodic_Msk (1UL << ARM_VSI_Timer_Periodic_Pos) /*!< Timer Control: Periodic Mask */ +#define ARM_VSI_Timer_Trig_IRQ_Pos 2U /*!< Timer Control: Trig_IRQ Position */ +#define ARM_VSI_Timer_Trig_IRQ_Msk (1UL << ARM_VSI_Timer_Trig_IRQ_Pos) /*!< Timer Control: Trig_IRQ Mask */ +#define ARM_VSI_Timer_Trig_DMA_Pos 3U /*!< Timer Control: Trig_DAM Position */ +#define ARM_VSI_Timer_Trig_DMA_Msk (1UL << ARM_VSI_Timer_Trig_DMA_Pos) /*!< Timer Control: Trig_DMA Mask */ 
+ +/* VSI DMA Control Definitions for DMA.Control register */ +#define ARM_VSI_DMA_Enable_Pos 0U /*!< DMA Control: Enable Position */ +#define ARM_VSI_DMA_Enable_Msk (1UL << ARM_VSI_DMA_Enable_Pos) /*!< DMA Control: Enable Mask */ +#define ARM_VSI_DMA_Direction_Pos 1U /*!< DMA Control: Direction Position */ +#define ARM_VSI_DMA_Direction_Msk (1UL << ARM_VSI_DMA_Direction_Pos) /*!< DMA Control: Direction Mask */ +#define ARM_VSI_DMA_Direction_P2M (0UL*ARM_VSI_DMA_Direction_Msk) /*!< DMA Control: Direction P2M */ +#define ARM_VSI_DMA_Direction_M2P (1UL*ARM_VSI_DMA_Direction_Msk) /*!< DMA Control: Direction M2P */ + +/* Memory mapping of 8 VSI peripherals */ +#define ARM_VSI0_BASE (0x4FF00000UL) /*!< VSI 0 Base Address */ +#define ARM_VSI1_BASE (0x4FF10000UL) /*!< VSI 1 Base Address */ +#define ARM_VSI2_BASE (0x4FF20000UL) /*!< VSI 2 Base Address */ +#define ARM_VSI3_BASE (0x4FF30000UL) /*!< VSI 3 Base Address */ +#define ARM_VSI4_BASE (0x4FF40000UL) /*!< VSI 4 Base Address */ +#define ARM_VSI5_BASE (0x4FF50000UL) /*!< VSI 5 Base Address */ +#define ARM_VSI6_BASE (0x4FF60000UL) /*!< VSI 6 Base Address */ +#define ARM_VSI7_BASE (0x4FF70000UL) /*!< VSI 7 Base Address */ +#define ARM_VSI0 ((ARM_VSI_Type *)ARM_VSI0_BASE) /*!< VSI 0 struct */ +#define ARM_VSI1 ((ARM_VSI_Type *)ARM_VSI1_BASE) /*!< VSI 1 struct */ +#define ARM_VSI2 ((ARM_VSI_Type *)ARM_VSI2_BASE) /*!< VSI 2 struct */ +#define ARM_VSI3 ((ARM_VSI_Type *)ARM_VSI3_BASE) /*!< VSI 3 struct */ +#define ARM_VSI4 ((ARM_VSI_Type *)ARM_VSI4_BASE) /*!< VSI 4 struct */ +#define ARM_VSI5 ((ARM_VSI_Type *)ARM_VSI5_BASE) /*!< VSI 5 struct */ +#define ARM_VSI6 ((ARM_VSI_Type *)ARM_VSI6_BASE) /*!< VSI 6 struct */ +#define ARM_VSI7 ((ARM_VSI_Type *)ARM_VSI7_BASE) /*!< VSI 7 struct */ + +#ifdef __cplusplus +} +#endif + +#endif /* __ARM_VSI_H */ diff --git a/cmsis-pack-examples/device/corstone/vsi/provenance.md b/cmsis-pack-examples/device/corstone/vsi/provenance.md new file mode 100644 index 0000000..fe13a2c --- /dev/null +++ 
b/cmsis-pack-examples/device/corstone/vsi/provenance.md @@ -0,0 +1,10 @@ +# Origin of files in this directory + +- `include`: https://github.com/ARM-software/AVH/tree/1d9e2b02001a6e5d8f2fd622e5cf301144604007/interface/include +- `video`: https://github.com/ARM-software/AVH/tree/1d9e2b02001a6e5d8f2fd622e5cf301144604007/interface/video + +**Note**: Files in this repository and files from [AVH repository](https://github.com/ARM-software/AVH/) +should be under the same license terms. + +* See [AVH repository LICENSE](https://github.com/ARM-software/AVH/blob/1d9e2b02001a6e5d8f2fd622e5cf301144604007/LICENSE) +* This repository's [LICENSE](../../../LICENSE) diff --git a/cmsis-pack-examples/device/corstone/vsi/video/include/video_drv.h b/cmsis-pack-examples/device/corstone/vsi/video/include/video_drv.h new file mode 100644 index 0000000..06bdfe3 --- /dev/null +++ b/cmsis-pack-examples/device/corstone/vsi/video/include/video_drv.h @@ -0,0 +1,143 @@ +/* + * Copyright (c) 2023 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef VIDEO_DRV_H +#define VIDEO_DRV_H + +#ifdef __cplusplus +extern "C" +{ +#endif + +#include + +/* Video Channel */ +#define VIDEO_DRV_IN0 (0UL) ///< Video Input channel 0 +#define VIDEO_DRV_OUT0 (1UL) ///< Video Output channel 0 +#define VIDEO_DRV_IN1 (2UL) ///< Video Input channel 1 +#define VIDEO_DRV_OUT1 (3UL) ///< Video Output channel 1 + +/* Video Mode */ +#define VIDEO_DRV_MODE_SINGLE (0UL) ///< Single frame +#define VIDEO_DRV_MODE_CONTINUOS (1UL) ///< Continuos stream + +/* Video Color Format */ +#define VIDEO_DRV_COLOR_FORMAT_BEGIN (0UL) ///< Color format begin +#define VIDEO_DRV_COLOR_GRAYSCALE8 (1UL) ///< 8 bit grayscale color format +#define VIDEO_DRV_COLOR_RGB888 (2UL) ///< 24 bit RGB color format +#define VIDEO_DRV_COLOR_BGR565 (3UL) ///< 16 bit BGR color format +#define VIDEO_DRV_COLOR_YUV420 (4UL) ///< 12 bit YUV420 color format +#define VIDEO_DRV_COLOR_NV12 (5UL) ///< 24 bit NV12 color format +#define VIDEO_DRV_COLOR_NV21 (6UL) ///< 24 bit NV12 color format +#define VIDEO_DRV_COLOR_FORMAT_END (7UL) ///< Color format end + +/* Video Event */ +#define VIDEO_DRV_EVENT_FRAME (1UL << 0) ///< Video frame received +#define VIDEO_DRV_EVENT_OVERFLOW (1UL << 1) ///< Video buffer overflow +#define VIDEO_DRV_EVENT_UNDERFLOW (1UL << 2) ///< Video buffer underflow +#define VIDEO_DRV_EVENT_EOS (1UL << 3) ///< Video end of stream + +/* Return code */ +#define VIDEO_DRV_OK (0) ///< Operation succeeded +#define VIDEO_DRV_ERROR (-1) ///< Unspecified error +#define VIDEO_DRV_ERROR_PARAMETER (-2) ///< Parameter error + +/// Video Status +typedef struct { + uint32_t active : 1; ///< Video stream active + uint32_t buf_empty : 1; ///< Video buffer empty + uint32_t buf_full : 1; ///< Video buffer full + uint32_t overflow : 1; ///< Video buffer overflow (cleared on GetStatus) + uint32_t underflow : 1; ///< Video buffer underflow (cleared on GetStatus) + uint32_t eos : 1; ///< Video end of stream (cleared on GetStatus) + uint32_t reserved : 26; +} 
VideoDrv_Status_t; + +/// \brief Video Events callback function type. +/// \param[in] channel channel number +/// \param[in] event events notification mask +/// \return none +typedef void (*VideoDrv_Event_t) (uint32_t channel, uint32_t event); + +/// \brief Initialize Video Interface. +/// \param[in] cb_event pointer to \ref VideoDrv_Event_t +/// \return return code +int32_t VideoDrv_Initialize (VideoDrv_Event_t cb_event); + +/// \brief De-initialize Video Interface. +/// \return return code +int32_t VideoDrv_Uninitialize (void); + +/// \brief Set Video channel file. +/// \param[in] channel channel number +/// \param[in] name video filename (pointer to NULL terminated string) +/// \return return code +int32_t VideoDrv_SetFile (uint32_t channel, const char *name); + +/// \brief Configure Video channel. +/// \param[in] channel channel number +/// \param[in] frame_width frame width in pixels +/// \param[in] frame_height frame height in pixels +/// \param[in] color_format pixel color format +/// \param[in] frame_rate frame rate (frames per second) +/// \return return code +int32_t VideoDrv_Configure (uint32_t channel, uint32_t frame_width, uint32_t frame_height, uint32_t color_format, uint32_t frame_rate); + +/// \brief Set Video channel buffer. +/// \param[in] channel channel number +/// \param[in] buf pointer to buffer for video stream +/// \param[in] buf_size video stream buffer size in bytes +/// \return return code +int32_t VideoDrv_SetBuf (uint32_t channel, void *buf, uint32_t buf_size); + +/// \brief Flush Video channel buffer. +/// \param[in] channel channel number +/// \return return code +int32_t VideoDrv_FlushBuf (uint32_t channel); + +/// \brief Start Video channel stream. +/// \param[in] channel channel number +/// \param[in] mode stream mode +/// \return return code +int32_t VideoDrv_StreamStart (uint32_t channel, uint32_t mode); + +/// \brief Stop Video channel stream. 
+/// \param[in] channel channel number +/// \return return code +int32_t VideoDrv_StreamStop (uint32_t channel); + +/// \brief Get Video channel Frame buffer. +/// \param[in] channel channel number +/// \return pointer to frame buffer +void *VideoDrv_GetFrameBuf (uint32_t channel); + +/// \brief Release Video channel Frame. +/// \param[in] channel channel number +/// \return return code +int32_t VideoDrv_ReleaseFrame (uint32_t channel); + +/// \brief Get Video channel status. +/// \param[in] channel channel number +/// \return \ref VideoDrv_Status_t +VideoDrv_Status_t VideoDrv_GetStatus (uint32_t channel); + +#ifdef __cplusplus +} +#endif + +#endif /* VIDEO_DRV_H */ diff --git a/cmsis-pack-examples/device/corstone/vsi/video/python/arm_vsi4.py b/cmsis-pack-examples/device/corstone/vsi/video/python/arm_vsi4.py new file mode 100644 index 0000000..657cdb1 --- /dev/null +++ b/cmsis-pack-examples/device/corstone/vsi/video/python/arm_vsi4.py @@ -0,0 +1,203 @@ +# Copyright (c) 2021-2023 Arm Limited. All rights reserved. + +# Virtual Streaming Interface instance 4 Python script + +##@addtogroup arm_vsi_py_video +# @{ +# +##@package arm_vsi4_video +#Documentation for VSI peripherals module. +# +#More details. 
+ +import logging +import vsi_video + +## Set verbosity level +#verbosity = logging.DEBUG +verbosity = logging.ERROR + +# [debugging] Verbosity settings +level = { 10: "DEBUG", 20: "INFO", 30: "WARNING", 40: "ERROR" } +logging.basicConfig(format='Py: VSI4: [%(levelname)s]\t%(message)s', level = verbosity) +logging.info("Verbosity level is set to " + level[verbosity]) + + +# Video Server configuration +server_address = ('127.0.0.1', 6000) +server_authkey = 'vsi_video' + + +# IRQ registers +IRQ_Status = 0 + +# Timer registers +Timer_Control = 0 +Timer_Interval = 0 + +# Timer Control register definitions +Timer_Control_Run_Msk = 1<<0 +Timer_Control_Periodic_Msk = 1<<1 +Timer_Control_Trig_IRQ_Msk = 1<<2 +Timer_Control_Trig_DMA_Msk = 1<<3 + +# DMA registers +DMA_Control = 0 + +# DMA Control register definitions +DMA_Control_Enable_Msk = 1<<0 +DMA_Control_Direction_Msk = 1<<1 +DMA_Control_Direction_P2M = 0<<1 +DMA_Control_Direction_M2P = 1<<1 + +# User registers +Regs = [0] * 64 + +# Data buffer +Data = bytearray() + + +## Initialize +# @return None +def init(): + logging.info("Python function init() called") + vsi_video.init(server_address, server_authkey) + + +## Read interrupt request (the VSI IRQ Status Register) +# @return value value read (32-bit) +def rdIRQ(): + global IRQ_Status + logging.info("Python function rdIRQ() called") + + value = IRQ_Status + logging.debug("Read interrupt request: {}".format(value)) + + return value + + +## Write interrupt request (the VSI IRQ Status Register) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrIRQ(value): + global IRQ_Status + logging.info("Python function wrIRQ() called") + + value = vsi_video.wrIRQ(IRQ_Status, value) + IRQ_Status = value + logging.debug("Write interrupt request: {}".format(value)) + + return value + + +## Write Timer registers (the VSI Timer Registers) +# @param index Timer register index (zero based) +# @param value value to write (32-bit) +# @return value value 
written (32-bit) +def wrTimer(index, value): + global Timer_Control, Timer_Interval + logging.info("Python function wrTimer() called") + + if index == 0: + Timer_Control = value + logging.debug("Write Timer_Control: {}".format(value)) + elif index == 1: + Timer_Interval = value + logging.debug("Write Timer_Interval: {}".format(value)) + + return value + + +## Timer event (called at Timer Overflow) +# @return None +def timerEvent(): + global IRQ_Status + + logging.info("Python function timerEvent() called") + + IRQ_Status = vsi_video.timerEvent(IRQ_Status) + + +## Write DMA registers (the VSI DMA Registers) +# @param index DMA register index (zero based) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrDMA(index, value): + global DMA_Control + logging.info("Python function wrDMA() called") + + if index == 0: + DMA_Control = value + logging.debug("Write DMA_Control: {}".format(value)) + + return value + + +## Read data from peripheral for DMA P2M transfer (VSI DMA) +# @param size size of data to read (in bytes, multiple of 4) +# @return data data read (bytearray) +def rdDataDMA(size): + global Data + logging.info("Python function rdDataDMA() called") + + Data = vsi_video.rdDataDMA(size) + + n = min(len(Data), size) + data = bytearray(size) + data[0:n] = Data[0:n] + logging.debug("Read data ({} bytes)".format(size)) + + return data + + +## Write data to peripheral for DMA M2P transfer (VSI DMA) +# @param data data to write (bytearray) +# @param size size of data to write (in bytes, multiple of 4) +# @return None +def wrDataDMA(data, size): + global Data + logging.info("Python function wrDataDMA() called") + + Data = data + logging.debug("Write data ({} bytes)".format(size)) + + vsi_video.wrDataDMA(data, size) + + return + + +## Read user registers (the VSI User Registers) +# @param index user register index (zero based) +# @return value value read (32-bit) +def rdRegs(index): + global Regs + logging.info("Python function rdRegs() 
called") + + if index <= vsi_video.REG_IDX_MAX: + Regs[index] = vsi_video.rdRegs(index) + + value = Regs[index] + logging.debug("Read user register at index {}: {}".format(index, value)) + + return value + + +## Write user registers (the VSI User Registers) +# @param index user register index (zero based) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrRegs(index, value): + global Regs + logging.info("Python function wrRegs() called") + + if index <= vsi_video.REG_IDX_MAX: + value = vsi_video.wrRegs(index, value) + + Regs[index] = value + logging.debug("Write user register at index {}: {}".format(index, value)) + + return value + + +## @} + diff --git a/cmsis-pack-examples/device/corstone/vsi/video/python/arm_vsi5.py b/cmsis-pack-examples/device/corstone/vsi/video/python/arm_vsi5.py new file mode 100644 index 0000000..d3469c1 --- /dev/null +++ b/cmsis-pack-examples/device/corstone/vsi/video/python/arm_vsi5.py @@ -0,0 +1,203 @@ +# Copyright (c) 2021-2023 Arm Limited. All rights reserved. + +# Virtual Streaming Interface instance 5 Python script + +##@addtogroup arm_vsi_py_video +# @{ +# +##@package arm_vsi5_video +#Documentation for VSI peripherals module. +# +#More details. 
+ +import logging +import vsi_video + +## Set verbosity level +#verbosity = logging.DEBUG +verbosity = logging.ERROR + +# [debugging] Verbosity settings +level = { 10: "DEBUG", 20: "INFO", 30: "WARNING", 40: "ERROR" } +logging.basicConfig(format='Py: VSI5: [%(levelname)s]\t%(message)s', level = verbosity) +logging.info("Verbosity level is set to " + level[verbosity]) + + +# Video Server configuration +server_address = ('127.0.0.1', 6001) +server_authkey = 'vsi_video' + + +# IRQ registers +IRQ_Status = 0 + +# Timer registers +Timer_Control = 0 +Timer_Interval = 0 + +# Timer Control register definitions +Timer_Control_Run_Msk = 1<<0 +Timer_Control_Periodic_Msk = 1<<1 +Timer_Control_Trig_IRQ_Msk = 1<<2 +Timer_Control_Trig_DMA_Msk = 1<<3 + +# DMA registers +DMA_Control = 0 + +# DMA Control register definitions +DMA_Control_Enable_Msk = 1<<0 +DMA_Control_Direction_Msk = 1<<1 +DMA_Control_Direction_P2M = 0<<1 +DMA_Control_Direction_M2P = 1<<1 + +# User registers +Regs = [0] * 64 + +# Data buffer +Data = bytearray() + + +## Initialize +# @return None +def init(): + logging.info("Python function init() called") + vsi_video.init(server_address, server_authkey) + + +## Read interrupt request (the VSI IRQ Status Register) +# @return value value read (32-bit) +def rdIRQ(): + global IRQ_Status + logging.info("Python function rdIRQ() called") + + value = IRQ_Status + logging.debug("Read interrupt request: {}".format(value)) + + return value + + +## Write interrupt request (the VSI IRQ Status Register) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrIRQ(value): + global IRQ_Status + logging.info("Python function wrIRQ() called") + + value = vsi_video.wrIRQ(IRQ_Status, value) + IRQ_Status = value + logging.debug("Write interrupt request: {}".format(value)) + + return value + + +## Write Timer registers (the VSI Timer Registers) +# @param index Timer register index (zero based) +# @param value value to write (32-bit) +# @return value value 
written (32-bit) +def wrTimer(index, value): + global Timer_Control, Timer_Interval + logging.info("Python function wrTimer() called") + + if index == 0: + Timer_Control = value + logging.debug("Write Timer_Control: {}".format(value)) + elif index == 1: + Timer_Interval = value + logging.debug("Write Timer_Interval: {}".format(value)) + + return value + + +## Timer event (called at Timer Overflow) +# @return None +def timerEvent(): + global IRQ_Status + + logging.info("Python function timerEvent() called") + + IRQ_Status = vsi_video.timerEvent(IRQ_Status) + + +## Write DMA registers (the VSI DMA Registers) +# @param index DMA register index (zero based) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrDMA(index, value): + global DMA_Control + logging.info("Python function wrDMA() called") + + if index == 0: + DMA_Control = value + logging.debug("Write DMA_Control: {}".format(value)) + + return value + + +## Read data from peripheral for DMA P2M transfer (VSI DMA) +# @param size size of data to read (in bytes, multiple of 4) +# @return data data read (bytearray) +def rdDataDMA(size): + global Data + logging.info("Python function rdDataDMA() called") + + Data = vsi_video.rdDataDMA(size) + + n = min(len(Data), size) + data = bytearray(size) + data[0:n] = Data[0:n] + logging.debug("Read data ({} bytes)".format(size)) + + return data + + +## Write data to peripheral for DMA M2P transfer (VSI DMA) +# @param data data to write (bytearray) +# @param size size of data to write (in bytes, multiple of 4) +# @return None +def wrDataDMA(data, size): + global Data + logging.info("Python function wrDataDMA() called") + + Data = data + logging.debug("Write data ({} bytes)".format(size)) + + vsi_video.wrDataDMA(data, size) + + return + + +## Read user registers (the VSI User Registers) +# @param index user register index (zero based) +# @return value value read (32-bit) +def rdRegs(index): + global Regs + logging.info("Python function rdRegs() 
called") + + if index <= vsi_video.REG_IDX_MAX: + Regs[index] = vsi_video.rdRegs(index) + + value = Regs[index] + logging.debug("Read user register at index {}: {}".format(index, value)) + + return value + + +## Write user registers (the VSI User Registers) +# @param index user register index (zero based) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrRegs(index, value): + global Regs + logging.info("Python function wrRegs() called") + + if index <= vsi_video.REG_IDX_MAX: + value = vsi_video.wrRegs(index, value) + + Regs[index] = value + logging.debug("Write user register at index {}: {}".format(index, value)) + + return value + + +## @} + diff --git a/cmsis-pack-examples/device/corstone/vsi/video/python/arm_vsi6.py b/cmsis-pack-examples/device/corstone/vsi/video/python/arm_vsi6.py new file mode 100644 index 0000000..718616e --- /dev/null +++ b/cmsis-pack-examples/device/corstone/vsi/video/python/arm_vsi6.py @@ -0,0 +1,203 @@ +# Copyright (c) 2021-2023 Arm Limited. All rights reserved. + +# Virtual Streaming Interface instance 6 Python script + +##@addtogroup arm_vsi_py_video +# @{ +# +##@package arm_vsi6_video +#Documentation for VSI peripherals module. +# +#More details. 
+ +import logging +import vsi_video + +## Set verbosity level +#verbosity = logging.DEBUG +verbosity = logging.ERROR + +# [debugging] Verbosity settings +level = { 10: "DEBUG", 20: "INFO", 30: "WARNING", 40: "ERROR" } +logging.basicConfig(format='Py: VSI6: [%(levelname)s]\t%(message)s', level = verbosity) +logging.info("Verbosity level is set to " + level[verbosity]) + + +# Video Server configuration +server_address = ('127.0.0.1', 6002) +server_authkey = 'vsi_video' + + +# IRQ registers +IRQ_Status = 0 + +# Timer registers +Timer_Control = 0 +Timer_Interval = 0 + +# Timer Control register definitions +Timer_Control_Run_Msk = 1<<0 +Timer_Control_Periodic_Msk = 1<<1 +Timer_Control_Trig_IRQ_Msk = 1<<2 +Timer_Control_Trig_DMA_Msk = 1<<3 + +# DMA registers +DMA_Control = 0 + +# DMA Control register definitions +DMA_Control_Enable_Msk = 1<<0 +DMA_Control_Direction_Msk = 1<<1 +DMA_Control_Direction_P2M = 0<<1 +DMA_Control_Direction_M2P = 1<<1 + +# User registers +Regs = [0] * 64 + +# Data buffer +Data = bytearray() + + +## Initialize +# @return None +def init(): + logging.info("Python function init() called") + vsi_video.init(server_address, server_authkey) + + +## Read interrupt request (the VSI IRQ Status Register) +# @return value value read (32-bit) +def rdIRQ(): + global IRQ_Status + logging.info("Python function rdIRQ() called") + + value = IRQ_Status + logging.debug("Read interrupt request: {}".format(value)) + + return value + + +## Write interrupt request (the VSI IRQ Status Register) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrIRQ(value): + global IRQ_Status + logging.info("Python function wrIRQ() called") + + value = vsi_video.wrIRQ(IRQ_Status, value) + IRQ_Status = value + logging.debug("Write interrupt request: {}".format(value)) + + return value + + +## Write Timer registers (the VSI Timer Registers) +# @param index Timer register index (zero based) +# @param value value to write (32-bit) +# @return value value 
written (32-bit) +def wrTimer(index, value): + global Timer_Control, Timer_Interval + logging.info("Python function wrTimer() called") + + if index == 0: + Timer_Control = value + logging.debug("Write Timer_Control: {}".format(value)) + elif index == 1: + Timer_Interval = value + logging.debug("Write Timer_Interval: {}".format(value)) + + return value + + +## Timer event (called at Timer Overflow) +# @return None +def timerEvent(): + global IRQ_Status + + logging.info("Python function timerEvent() called") + + IRQ_Status = vsi_video.timerEvent(IRQ_Status) + + +## Write DMA registers (the VSI DMA Registers) +# @param index DMA register index (zero based) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrDMA(index, value): + global DMA_Control + logging.info("Python function wrDMA() called") + + if index == 0: + DMA_Control = value + logging.debug("Write DMA_Control: {}".format(value)) + + return value + + +## Read data from peripheral for DMA P2M transfer (VSI DMA) +# @param size size of data to read (in bytes, multiple of 4) +# @return data data read (bytearray) +def rdDataDMA(size): + global Data + logging.info("Python function rdDataDMA() called") + + Data = vsi_video.rdDataDMA(size) + + n = min(len(Data), size) + data = bytearray(size) + data[0:n] = Data[0:n] + logging.debug("Read data ({} bytes)".format(size)) + + return data + + +## Write data to peripheral for DMA M2P transfer (VSI DMA) +# @param data data to write (bytearray) +# @param size size of data to write (in bytes, multiple of 4) +# @return None +def wrDataDMA(data, size): + global Data + logging.info("Python function wrDataDMA() called") + + Data = data + logging.debug("Write data ({} bytes)".format(size)) + + vsi_video.wrDataDMA(data, size) + + return + + +## Read user registers (the VSI User Registers) +# @param index user register index (zero based) +# @return value value read (32-bit) +def rdRegs(index): + global Regs + logging.info("Python function rdRegs() 
called") + + if index <= vsi_video.REG_IDX_MAX: + Regs[index] = vsi_video.rdRegs(index) + + value = Regs[index] + logging.debug("Read user register at index {}: {}".format(index, value)) + + return value + + +## Write user registers (the VSI User Registers) +# @param index user register index (zero based) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrRegs(index, value): + global Regs + logging.info("Python function wrRegs() called") + + if index <= vsi_video.REG_IDX_MAX: + value = vsi_video.wrRegs(index, value) + + Regs[index] = value + logging.debug("Write user register at index {}: {}".format(index, value)) + + return value + + +## @} + diff --git a/cmsis-pack-examples/device/corstone/vsi/video/python/arm_vsi7.py b/cmsis-pack-examples/device/corstone/vsi/video/python/arm_vsi7.py new file mode 100644 index 0000000..6899a20 --- /dev/null +++ b/cmsis-pack-examples/device/corstone/vsi/video/python/arm_vsi7.py @@ -0,0 +1,203 @@ +# Copyright (c) 2021-2023 Arm Limited. All rights reserved. + +# Virtual Streaming Interface instance 7 Python script + +##@addtogroup arm_vsi_py_video +# @{ +# +##@package arm_vsi7_video +#Documentation for VSI peripherals module. +# +#More details. 
+ +import logging +import vsi_video + +## Set verbosity level +#verbosity = logging.DEBUG +verbosity = logging.ERROR + +# [debugging] Verbosity settings +level = { 10: "DEBUG", 20: "INFO", 30: "WARNING", 40: "ERROR" } +logging.basicConfig(format='Py: VSI7: [%(levelname)s]\t%(message)s', level = verbosity) +logging.info("Verbosity level is set to " + level[verbosity]) + + +# Video Server configuration +server_address = ('127.0.0.1', 6003) +server_authkey = 'vsi_video' + + +# IRQ registers +IRQ_Status = 0 + +# Timer registers +Timer_Control = 0 +Timer_Interval = 0 + +# Timer Control register definitions +Timer_Control_Run_Msk = 1<<0 +Timer_Control_Periodic_Msk = 1<<1 +Timer_Control_Trig_IRQ_Msk = 1<<2 +Timer_Control_Trig_DMA_Msk = 1<<3 + +# DMA registers +DMA_Control = 0 + +# DMA Control register definitions +DMA_Control_Enable_Msk = 1<<0 +DMA_Control_Direction_Msk = 1<<1 +DMA_Control_Direction_P2M = 0<<1 +DMA_Control_Direction_M2P = 1<<1 + +# User registers +Regs = [0] * 64 + +# Data buffer +Data = bytearray() + + +## Initialize +# @return None +def init(): + logging.info("Python function init() called") + vsi_video.init(server_address, server_authkey) + + +## Read interrupt request (the VSI IRQ Status Register) +# @return value value read (32-bit) +def rdIRQ(): + global IRQ_Status + logging.info("Python function rdIRQ() called") + + value = IRQ_Status + logging.debug("Read interrupt request: {}".format(value)) + + return value + + +## Write interrupt request (the VSI IRQ Status Register) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrIRQ(value): + global IRQ_Status + logging.info("Python function wrIRQ() called") + + value = vsi_video.wrIRQ(IRQ_Status, value) + IRQ_Status = value + logging.debug("Write interrupt request: {}".format(value)) + + return value + + +## Write Timer registers (the VSI Timer Registers) +# @param index Timer register index (zero based) +# @param value value to write (32-bit) +# @return value value 
written (32-bit) +def wrTimer(index, value): + global Timer_Control, Timer_Interval + logging.info("Python function wrTimer() called") + + if index == 0: + Timer_Control = value + logging.debug("Write Timer_Control: {}".format(value)) + elif index == 1: + Timer_Interval = value + logging.debug("Write Timer_Interval: {}".format(value)) + + return value + + +## Timer event (called at Timer Overflow) +# @return None +def timerEvent(): + global IRQ_Status + + logging.info("Python function timerEvent() called") + + IRQ_Status = vsi_video.timerEvent(IRQ_Status) + + +## Write DMA registers (the VSI DMA Registers) +# @param index DMA register index (zero based) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrDMA(index, value): + global DMA_Control + logging.info("Python function wrDMA() called") + + if index == 0: + DMA_Control = value + logging.debug("Write DMA_Control: {}".format(value)) + + return value + + +## Read data from peripheral for DMA P2M transfer (VSI DMA) +# @param size size of data to read (in bytes, multiple of 4) +# @return data data read (bytearray) +def rdDataDMA(size): + global Data + logging.info("Python function rdDataDMA() called") + + Data = vsi_video.rdDataDMA(size) + + n = min(len(Data), size) + data = bytearray(size) + data[0:n] = Data[0:n] + logging.debug("Read data ({} bytes)".format(size)) + + return data + + +## Write data to peripheral for DMA M2P transfer (VSI DMA) +# @param data data to write (bytearray) +# @param size size of data to write (in bytes, multiple of 4) +# @return None +def wrDataDMA(data, size): + global Data + logging.info("Python function wrDataDMA() called") + + Data = data + logging.debug("Write data ({} bytes)".format(size)) + + vsi_video.wrDataDMA(data, size) + + return + + +## Read user registers (the VSI User Registers) +# @param index user register index (zero based) +# @return value value read (32-bit) +def rdRegs(index): + global Regs + logging.info("Python function rdRegs() 
called") + + if index <= vsi_video.REG_IDX_MAX: + Regs[index] = vsi_video.rdRegs(index) + + value = Regs[index] + logging.debug("Read user register at index {}: {}".format(index, value)) + + return value + + +## Write user registers (the VSI User Registers) +# @param index user register index (zero based) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrRegs(index, value): + global Regs + logging.info("Python function wrRegs() called") + + if index <= vsi_video.REG_IDX_MAX: + value = vsi_video.wrRegs(index, value) + + Regs[index] = value + logging.debug("Write user register at index {}: {}".format(index, value)) + + return value + + +## @} + diff --git a/cmsis-pack-examples/device/corstone/vsi/video/python/vsi_video.py b/cmsis-pack-examples/device/corstone/vsi/video/python/vsi_video.py new file mode 100644 index 0000000..8a7c6db --- /dev/null +++ b/cmsis-pack-examples/device/corstone/vsi/video/python/vsi_video.py @@ -0,0 +1,470 @@ +# Copyright (c) 2023-2024 Arm Limited. All rights reserved. +# +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the License); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an AS IS BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Python VSI Video Client module + +try: + import time + import atexit + import logging + import subprocess + from multiprocessing.connection import Client, Connection + from os import path, getcwd + from os import name as os_name +except ImportError as err: + print(f"VSI:Video:ImportError: {err}") + raise +except Exception as e: + print(f"VSI:Video:Exception: {type(e).__name__}") + raise + + +class VideoClient: + def __init__(self): + # Server commands + self.SET_FILENAME = 1 + self.STREAM_CONFIGURE = 2 + self.STREAM_ENABLE = 3 + self.STREAM_DISABLE = 4 + self.FRAME_READ = 5 + self.FRAME_WRITE = 6 + self.CLOSE_SERVER = 7 + # Color space + self.GRAYSCALE8 = 1 + self.RGB888 = 2 + self.BGR565 = 3 + self.YUV420 = 4 + self.NV12 = 5 + self.NV21 = 6 + # Variables + self.conn = None + + def connectToServer(self, address, authkey): + for _ in range(50): + try: + self.conn = Client(address, authkey=authkey.encode('utf-8')) + if isinstance(self.conn, Connection): + break + else: + self.conn = None + except Exception: + self.conn = None + time.sleep(0.01) + + def setFilename(self, filename, mode): + self.conn.send([self.SET_FILENAME, getcwd(), filename, mode]) + filename_valid = self.conn.recv() + + return filename_valid + + def configureStream(self, frame_width, frame_height, color_format, frame_rate): + self.conn.send([self.STREAM_CONFIGURE, frame_width, frame_height, color_format, frame_rate]) + configuration_valid = self.conn.recv() + + return configuration_valid + + def enableStream(self, mode): + self.conn.send([self.STREAM_ENABLE, mode]) + stream_active = self.conn.recv() + + return stream_active + + def disableStream(self): + self.conn.send([self.STREAM_DISABLE]) + stream_active = self.conn.recv() + + return stream_active + + def readFrame(self): + self.conn.send([self.FRAME_READ]) + data = self.conn.recv_bytes() + eos = self.conn.recv() + + return data, eos + + def writeFrame(self, data): + self.conn.send([self.FRAME_WRITE]) + self.conn.send_bytes(data) + + def 
closeServer(self): + try: + if isinstance(self.conn, Connection): + self.conn.send([self.CLOSE_SERVER]) + self.conn.close() + except Exception as e: + logging.error(f'Exception occurred on cleanup: {e}') + + +# User registers +REG_IDX_MAX = 12 # Maximum user register index used in VSI +MODE = 0 # Regs[0] // Mode: 0=Input, 1=Output +CONTROL = 0 # Regs[1] // Control: enable, flush +STATUS = 0 # Regs[2] // Status: active, buf_empty, buf_full, overflow, underflow, eos +FILENAME_LEN = 0 # Regs[3] // Filename length +FILENAME_CHAR = 0 # Regs[4] // Filename character +FILENAME_VALID = 0 # Regs[5] // Filename valid flag +FRAME_WIDTH = 300 # Regs[6] // Requested frame width +FRAME_HEIGHT = 300 # Regs[7] // Requested frame height +COLOR_FORMAT = 0 # Regs[8] // Color format +FRAME_RATE = 0 # Regs[9] // Frame rate +FRAME_INDEX = 0 # Regs[10] // Frame index +FRAME_COUNT = 0 # Regs[11] // Frame count +FRAME_COUNT_MAX = 0 # Regs[12] // Frame count maximum + +# MODE register definitions +MODE_IO_Msk = 1<<0 +MODE_Input = 0<<0 +MODE_Output = 1<<0 + +# CONTROL register definitions +CONTROL_ENABLE_Msk = 1<<0 +CONTROL_CONTINUOS_Msk = 1<<1 +CONTROL_BUF_FLUSH_Msk = 1<<2 + +# STATUS register definitions +STATUS_ACTIVE_Msk = 1<<0 +STATUS_BUF_EMPTY_Msk = 1<<1 +STATUS_BUF_FULL_Msk = 1<<2 +STATUS_OVERFLOW_Msk = 1<<3 +STATUS_UNDERFLOW_Msk = 1<<4 +STATUS_EOS_Msk = 1<<5 + +# IRQ Status register definitions +IRQ_Status_FRAME_Msk = 1<<0 +IRQ_Status_OVERFLOW_Msk = 1<<1 +IRQ_Status_UNDERFLOW_Msk = 1<<2 +IRQ_Status_EOS_Msk = 1<<3 + +# Variables +Video = VideoClient() +Filename = "" +FilenameIdx = 0 + + +# Close VSI Video Server on exit +def cleanup(): + Video.closeServer() + + +# Client connection to VSI Video Server +def init(address, authkey): + global FILENAME_VALID + + base_dir = path.dirname(__file__) + server_path = path.join(base_dir, 'vsi_video_server.py') + + logging.info("Start video server") + if path.isfile(server_path): + # Start Video Server + if os_name == 'nt': + py_cmd = 'python' + 
else: + py_cmd = 'python3.9' + cmd = f"{py_cmd} {server_path} "\ + f"--ip {address[0]} "\ + f"--port {address[1]} "\ + f"--authkey {authkey}" + subprocess.Popen(cmd, shell=True) + # Connect to Video Server + Video.connectToServer(address, authkey) + if Video.conn == None: + logging.error("Server not connected") + + else: + logging.error(f"Server script not found: {server_path}") + + # Register clean-up function + atexit.register(cleanup) + + +## Flush Stream buffer +def flushBuffer(): + global STATUS, FRAME_INDEX, FRAME_COUNT + + STATUS |= STATUS_BUF_EMPTY_Msk + STATUS &= ~STATUS_BUF_FULL_Msk + + FRAME_INDEX = 0 + FRAME_COUNT = 0 + + +## VSI IRQ Status register +# @param IRQ_Status IRQ status register to update +# @param value status bits to clear +# @return IRQ_Status return updated register +def wrIRQ(IRQ_Status, value): + IRQ_Status_Clear = IRQ_Status & ~value + IRQ_Status &= ~IRQ_Status_Clear + + return IRQ_Status + + +## Timer Event +# @param IRQ_Status IRQ status register to update +# @return IRQ_Status return updated register +def timerEvent(IRQ_Status): + + IRQ_Status |= IRQ_Status_FRAME_Msk + + if (STATUS & STATUS_OVERFLOW_Msk) != 0: + IRQ_Status |= IRQ_Status_OVERFLOW_Msk + + if (STATUS & STATUS_UNDERFLOW_Msk) != 0: + IRQ_Status |= IRQ_Status_UNDERFLOW_Msk + + if (STATUS & STATUS_EOS_Msk) != 0: + IRQ_Status |= IRQ_Status_EOS_Msk + + if (CONTROL & CONTROL_CONTINUOS_Msk) == 0: + wrCONTROL(CONTROL & ~(CONTROL_ENABLE_Msk | CONTROL_CONTINUOS_Msk)) + + return IRQ_Status + + +## Read data from peripheral for DMA P2M transfer (VSI DMA) +# @param size size of data to read (in bytes, multiple of 4) +# @return data data read (bytearray) +def rdDataDMA(size): + global STATUS, FRAME_COUNT + + if (STATUS & STATUS_ACTIVE_Msk) != 0: + + if Video.conn != None: + data, eos = Video.readFrame() + if eos: + STATUS |= STATUS_EOS_Msk + if FRAME_COUNT < FRAME_COUNT_MAX: + FRAME_COUNT += 1 + else: + STATUS |= STATUS_OVERFLOW_Msk + if FRAME_COUNT == FRAME_COUNT_MAX: + STATUS |= 
STATUS_BUF_FULL_Msk + STATUS &= ~STATUS_BUF_EMPTY_Msk + else: + data = bytearray() + + else: + data = bytearray() + + return data + + +## Write data to peripheral for DMA M2P transfer (VSI DMA) +# @param data data to write (bytearray) +# @param size size of data to write (in bytes, multiple of 4) +def wrDataDMA(data, size): + global STATUS, FRAME_COUNT + + if (STATUS & STATUS_ACTIVE_Msk) != 0: + + if Video.conn != None: + Video.writeFrame(data) + if FRAME_COUNT > 0: + FRAME_COUNT -= 1 + else: + STATUS |= STATUS_UNDERFLOW_Msk + if FRAME_COUNT == 0: + STATUS |= STATUS_BUF_EMPTY_Msk + STATUS &= ~STATUS_BUF_FULL_Msk + + +## Write CONTROL register (user register) +# @param value value to write (32-bit) +def wrCONTROL(value): + global CONTROL, STATUS + + if ((value ^ CONTROL) & CONTROL_ENABLE_Msk) != 0: + STATUS &= ~STATUS_ACTIVE_Msk + if (value & CONTROL_ENABLE_Msk) != 0: + logging.info("Start video stream") + if Video.conn != None: + logging.info("Configure video stream") + configuration_valid = Video.configureStream(FRAME_WIDTH, FRAME_HEIGHT, COLOR_FORMAT, FRAME_RATE) + if configuration_valid: + logging.info("Enable video stream") + server_active = Video.enableStream(MODE) + if server_active: + STATUS |= STATUS_ACTIVE_Msk + STATUS &= ~(STATUS_OVERFLOW_Msk | STATUS_UNDERFLOW_Msk | STATUS_EOS_Msk) + else: + logging.error("Enable video stream failed") + else: + logging.error("Configure video stream failed") + else: + logging.error("Server not connected") + else: + logging.info("Stop video stream") + if Video.conn != None: + logging.info("Disable video stream") + Video.disableStream() + else: + logging.error("Server not connected") + + if (value & CONTROL_BUF_FLUSH_Msk) != 0: + value &= ~CONTROL_BUF_FLUSH_Msk + flushBuffer() + + CONTROL = value + + +## Read STATUS register (user register) +# @return status current STATUS User register (32-bit) +def rdSTATUS(): + global STATUS + + status = STATUS + STATUS &= ~(STATUS_OVERFLOW_Msk | STATUS_UNDERFLOW_Msk | STATUS_EOS_Msk) + 
+ return status + + +## Write FILENAME_LEN register (user register) +# @param value value to write (32-bit) +def wrFILENAME_LEN(value): + global STATUS, FILENAME_LEN, FILENAME_VALID, Filename, FilenameIdx + + logging.info("Set new source name length and reset filename and valid flag") + FilenameIdx = 0 + Filename = "" + FILENAME_VALID = 0 + FILENAME_LEN = value + + +## Write FILENAME_CHAR register (user register) +# @param value value to write (32-bit) +def wrFILENAME_CHAR(value): + global FILENAME_VALID, Filename, FilenameIdx + + if FilenameIdx < FILENAME_LEN: + logging.info(f"Append {value} to filename") + Filename += f"{value}" + FilenameIdx += 1 + logging.debug(f"Received {FilenameIdx} of {FILENAME_LEN} characters") + + if FilenameIdx == FILENAME_LEN: + logging.info("Check if file exists on Server side and set VALID flag") + logging.debug(f"Filename: {Filename}") + + if Video.conn != None: + FILENAME_VALID = Video.setFilename(Filename, MODE) + else: + logging.error("Server not connected") + + logging.debug(f"Filename VALID: {FILENAME_VALID}") + + +## Write FRAME_INDEX register (user register) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrFRAME_INDEX(value): + global STATUS, FRAME_INDEX, FRAME_COUNT + + FRAME_INDEX += 1 + if FRAME_INDEX == FRAME_COUNT_MAX: + FRAME_INDEX = 0 + + if (MODE & MODE_IO_Msk) == MODE_Input: + # Input + if FRAME_COUNT > 0: + FRAME_COUNT -= 1 + if FRAME_COUNT == 0: + STATUS |= STATUS_BUF_EMPTY_Msk + STATUS &= ~STATUS_BUF_FULL_Msk + else: + # Output + if FRAME_COUNT < FRAME_COUNT_MAX: + FRAME_COUNT += 1 + if FRAME_COUNT == FRAME_COUNT_MAX: + STATUS |= STATUS_BUF_FULL_Msk + STATUS &= ~STATUS_BUF_EMPTY_Msk + + return FRAME_INDEX + + +## Read user registers (the VSI User Registers) +# @param index user register index (zero based) +# @return value value read (32-bit) +def rdRegs(index): + value = 0 + + if index == 0: + value = MODE + elif index == 1: + value = CONTROL + elif index == 2: + value = 
rdSTATUS() + elif index == 3: + value = FILENAME_LEN + elif index == 4: + value = FILENAME_CHAR + elif index == 5: + value = FILENAME_VALID + elif index == 6: + value = FRAME_WIDTH + elif index == 7: + value = FRAME_HEIGHT + elif index == 8: + value = COLOR_FORMAT + elif index == 9: + value = FRAME_RATE + elif index == 10: + value = FRAME_INDEX + elif index == 11: + value = FRAME_COUNT + elif index == 12: + value = FRAME_COUNT_MAX + + return value + + +## Write user registers (the VSI User Registers) +# @param index user register index (zero based) +# @param value value to write (32-bit) +# @return value value written (32-bit) +def wrRegs(index, value): + global MODE, FRAME_WIDTH, FRAME_HEIGHT, COLOR_FORMAT, FRAME_RATE, FRAME_COUNT_MAX + + if index == 0: + MODE = value + elif index == 1: + wrCONTROL(value) + elif index == 2: + value = STATUS + elif index == 3: + wrFILENAME_LEN(value) + elif index == 4: + wrFILENAME_CHAR(chr(value)) + elif index == 5: + value = FILENAME_VALID + elif index == 6: + if value != 0: + FRAME_WIDTH = value + elif index == 7: + if value != 0: + FRAME_HEIGHT = value + elif index == 8: + COLOR_FORMAT = value + elif index == 9: + FRAME_RATE = value + elif index == 10: + value = wrFRAME_INDEX(value) + elif index == 11: + value = FRAME_COUNT + elif index == 12: + FRAME_COUNT_MAX = value + flushBuffer() + + return value diff --git a/cmsis-pack-examples/device/corstone/vsi/video/python/vsi_video_server.py b/cmsis-pack-examples/device/corstone/vsi/video/python/vsi_video_server.py new file mode 100644 index 0000000..831179c --- /dev/null +++ b/cmsis-pack-examples/device/corstone/vsi/video/python/vsi_video_server.py @@ -0,0 +1,455 @@ +# Copyright (c) 2023-2024 Arm Limited. All rights reserved. +# +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the License); you may +# not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an AS IS BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Python VSI Video Server module + +try: + import argparse + import ipaddress + import logging + import os + from multiprocessing.connection import Listener + + import cv2 + import numpy as np +except ImportError as err: + print(f"VSI:Video:Server:ImportError: {err}") +except Exception as e: + print(f"VSI:Video:Server:Exception: {type(e).__name__}") + + +## Set verbosity level +verbosity = logging.ERROR + +# [debugging] Verbosity settings +level = { 10: "DEBUG", 20: "INFO", 30: "WARNING", 40: "ERROR" } +logging.basicConfig(format='VSI Server: [%(levelname)s]\t%(message)s', level = verbosity) +logging.info("Verbosity level is set to " + level[verbosity]) + +# Default Server configuration +default_address = ('127.0.0.1', 6000) +default_authkey = 'vsi_video' + +# Supported file extensions +video_file_extensions = ('wmv', 'avi', 'mp4') +image_file_extensions = ('bmp', 'png', 'jpg') +video_fourcc = {'wmv' : 'WMV1', 'avi' : 'MJPG', 'mp4' : 'mp4v'} + +# Mode Input/Output +MODE_IO_Msk = 1<<0 +MODE_Input = 0<<0 +MODE_Output = 1<<0 + +class VideoServer: + def __init__(self, address, authkey): + # Server commands + self.SET_FILENAME = 1 + self.STREAM_CONFIGURE = 2 + self.STREAM_ENABLE = 3 + self.STREAM_DISABLE = 4 + self.FRAME_READ = 5 + self.FRAME_WRITE = 6 + self.CLOSE_SERVER = 7 + # Color space + self.GRAYSCALE8 = 1 + self.RGB888 = 2 + self.BGR565 = 3 + self.YUV420 = 4 + self.NV12 = 5 + self.NV21 = 6 + # Variables + self.listener = Listener(address, authkey=authkey.encode('utf-8')) + self.filename = "" + self.mode = None + self.active = False + self.video = True + 
self.stream = None + self.frame_ratio = 0 + self.frame_drop = 0 + self.frame_index = 0 + self.eos = False + # Stream configuration + self.resolution = (None, None) + self.color_format = None + self.frame_rate = None + + # Set filename + def _setFilename(self, base_dir, filename, mode): + filename_valid = False + + if self.active: + return filename_valid + + self.filename = "" + self.frame_index = 0 + + file_extension = str(filename).split('.')[-1].lower() + + if file_extension in video_file_extensions: + self.video = True + else: + self.video = False + + file_path = os.path.join(base_dir, filename) + logging.debug(f"File path: {file_path}") + + if (mode & MODE_IO_Msk) == MODE_Input: + self.mode = MODE_Input + if os.path.isfile(file_path): + if file_extension in (video_file_extensions + image_file_extensions): + self.filename = file_path + filename_valid = True + else: + self.mode = MODE_Output + if file_extension in (video_file_extensions + image_file_extensions): + if os.path.isfile(file_path): + os.remove(file_path) + self.filename = file_path + filename_valid = True + + return filename_valid + + # Configure video stream + def _configureStream(self, frame_width, frame_height, color_format, frame_rate): + if (frame_width == 0 or frame_height == 0 or frame_rate == 0): + return False + + self.resolution = (frame_width, frame_height) + self.color_format = color_format + self.frame_rate = frame_rate + + return True + + # Enable video stream + def _enableStream(self, mode): + if self.active: + return + + self.eos = False + self.frame_ratio = 0 + self.frame_drop = 0 + + if self.stream is not None: + self.stream.release() + self.stream = None + + if self.filename == "": + self.video = True + if (mode & MODE_IO_Msk) == MODE_Input: + # Device mode: camera + self.mode = MODE_Input + else: + # Device mode: display + self.mode = MODE_Output + + if self.video: + if self.mode == MODE_Input: + if self.filename == "": + self.stream = cv2.VideoCapture(0) + if not 
self.stream.isOpened(): + logging.error("Failed to open Camera interface") + return + else: + self.stream = cv2.VideoCapture(self.filename) + self.stream.set(cv2.CAP_PROP_POS_FRAMES, self.frame_index) + video_fps = self.stream.get(cv2.CAP_PROP_FPS) + if video_fps > self.frame_rate: + self.frame_ratio = video_fps / self.frame_rate + logging.debug(f"Frame ratio: {self.frame_ratio}") + else: + if self.filename != "": + extension = str(self.filename).split('.')[-1].lower() + fourcc = cv2.VideoWriter_fourcc(*f'{video_fourcc[extension]}') + + if os.path.isfile(self.filename) and (self.frame_index != 0): + tmp_filename = f'{self.filename.rstrip(f".{extension}")}_tmp.{extension}' + os.rename(self.filename, tmp_filename) + cap = cv2.VideoCapture(tmp_filename) + width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)) + height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)) + self.resolution = (width, height) + self.frame_rate = cap.get(cv2.CAP_PROP_FPS) + self.stream = cv2.VideoWriter(self.filename, fourcc, self.frame_rate, self.resolution) + + while cap.isOpened(): + ret, frame = cap.read() + if not ret: + cap.release() + os.remove(tmp_filename) + break + self.stream.write(frame) + del frame + + else: + self.stream = cv2.VideoWriter(self.filename, fourcc, self.frame_rate, self.resolution) + + self.active = True + logging.info("Stream enabled") + + # Disable Video Server + def _disableStream(self): + self.active = False + if self.stream is not None: + if self.mode == MODE_Input: + self.frame_index = self.stream.get(cv2.CAP_PROP_POS_FRAMES) + self.stream.release() + self.stream = None + logging.info("Stream disabled") + + # Resize frame to requested resolution in pixels + def __resizeFrame(self, frame, resolution): + frame_h = frame.shape[0] + frame_w = frame.shape[1] + + # Calculate requested aspect ratio (width/height): + crop_aspect_ratio = resolution[0] / resolution[1] + + if crop_aspect_ratio != (frame_w / frame_h): + # Crop into image with resize aspect ratio + crop_w = int(frame_h * 
crop_aspect_ratio) + crop_h = int(frame_w / crop_aspect_ratio) + + if crop_w > frame_w: + # Crop top and bottom part of the image + top = (frame_h - crop_h) // 2 + bottom = top + crop_h + frame = frame[top : bottom, 0 : frame_w] + elif crop_h > frame_h: + # Crop left and right side of the image`` + left = (frame_w - crop_w) // 2 + right = left + crop_w + frame = frame[0 : frame_h, left : right] + else: + # Crop to the center of the image + left = (frame_w - crop_w) // 2 + right = left + crop_w + top = (frame_h - crop_h) // 2 + bottom = top + crop_h + frame = frame[top : bottom, left : right] + logging.debug(f"Frame cropped from ({frame_w}, {frame_h}) to ({frame.shape[1]}, {frame.shape[0]})") + + logging.debug(f"Resize frame from ({frame.shape[1]}, {frame.shape[0]}) to ({resolution[0]}, {resolution[1]})") + try: + frame = cv2.resize(frame, resolution) + except Exception as e: + logging.error(f"Error in resizeFrame(): {e}") + + return frame + + # Change color space of a frame from BGR to selected profile + def __changeColorSpace(self, frame, color_space): + color_format = None + + # Default OpenCV color profile: BGR + if self.mode == MODE_Input: + if color_space == self.GRAYSCALE8: + color_format = cv2.COLOR_BGR2GRAY + elif color_space == self.RGB888: + color_format = cv2.COLOR_BGR2RGB + elif color_space == self.BGR565: + color_format = cv2.COLOR_BGR2BGR565 + elif color_space == self.YUV420: + color_format = cv2.COLOR_BGR2YUV_I420 + elif color_space == self.NV12: + frame = self.__changeColorSpace(frame, self.YUV420) + color_format = cv2.COLOR_YUV2RGB_NV12 + elif color_space == self.NV21: + frame = self.__changeColorSpace(frame, self.YUV420) + color_format = cv2.COLOR_YUV2RGB_NV21 + + else: + if color_space == self.GRAYSCALE8: + color_format = cv2.COLOR_GRAY2BGR + elif color_space == self.RGB888: + color_format = cv2.COLOR_RGB2BGR + elif color_space == self.BGR565: + color_format = cv2.COLOR_BGR5652BGR + elif color_space == self.YUV420: + color_format = 
cv2.COLOR_YUV2BGR_I420 + elif color_space == self.NV12: + color_format = cv2.COLOR_YUV2BGR_I420 + elif color_space == self.NV21: + color_format = cv2.COLOR_YUV2BGR_I420 + + if color_format != None: + logging.debug(f"Change color space to {color_format}") + try: + frame = cv2.cvtColor(frame, color_format) + except Exception as e: + logging.error(f"Error in changeColorSpace(): {e}") + + return frame + + # Read frame from source + def _readFrame(self): + frame = bytearray() + + if not self.active: + return frame + + if self.eos: + return frame + + if self.video: + if self.frame_ratio > 1: + _, tmp_frame = self.stream.read() + self.frame_drop += (self.frame_ratio - 1) + if self.frame_drop > 1: + logging.debug(f"Frames to drop: {self.frame_drop}") + drop = int(self.frame_drop // 1) + for i in range(drop): + _, _ = self.stream.read() + logging.debug(f"Frames dropped: {drop}") + self.frame_drop -= drop + logging.debug(f"Frames left to drop: {self.frame_drop}") + else: + _, tmp_frame = self.stream.read() + if tmp_frame is None: + self.eos = True + logging.debug("End of stream.") + else: + tmp_frame = cv2.imread(self.filename) + self.eos = True + logging.debug("End of stream.") + + if tmp_frame is not None: + tmp_frame = self.__resizeFrame(tmp_frame, self.resolution) + tmp_frame = self.__changeColorSpace(tmp_frame, self.color_format) + frame = bytearray(tmp_frame.tobytes()) + + return frame + + # Write frame to destination + def _writeFrame(self, frame): + if not self.active: + return + + try: + decoded_frame = np.frombuffer(frame, dtype=np.uint8) + decoded_frame = decoded_frame.reshape((self.resolution[0], self.resolution[1], 3)) + bgr_frame = self.__changeColorSpace(decoded_frame, self.RGB888) + + if self.filename == "": + cv2.imshow(self.filename, bgr_frame) + cv2.waitKey(10) + else: + if self.video: + self.stream.write(np.uint8(bgr_frame)) + self.frame_index += 1 + else: + cv2.imwrite(self.filename, bgr_frame) + except Exception: + pass + + # Run Video Server + def 
run(self): + logging.info("Video server started") + + try: + conn = self.listener.accept() + logging.info(f'Connection accepted {self.listener.address}') + except Exception: + logging.error("Connection not accepted") + return + + while True: + try: + recv = conn.recv() + except EOFError: + return + + cmd = recv[0] # Command + payload = recv[1:] # Payload + + if cmd == self.SET_FILENAME: + logging.info("Set filename called") + filename_valid = self._setFilename(payload[0], payload[1], payload[2]) + conn.send(filename_valid) + + elif cmd == self.STREAM_CONFIGURE: + logging.info("Stream configure called") + configuration_valid = self._configureStream(payload[0], payload[1], payload[2], payload[3]) + conn.send(configuration_valid) + + elif cmd == self.STREAM_ENABLE: + logging.info("Enable stream called") + self._enableStream(payload[0]) + conn.send(self.active) + + elif cmd == self.STREAM_DISABLE: + logging.info("Disable stream called") + self._disableStream() + conn.send(self.active) + + elif cmd == self.FRAME_READ: + logging.info("Read frame called") + frame = self._readFrame() + conn.send_bytes(frame) + conn.send(self.eos) + + elif cmd == self.FRAME_WRITE: + logging.info("Write frame called") + frame = conn.recv_bytes() + self._writeFrame(frame) + + elif cmd == self.CLOSE_SERVER: + logging.info("Close server connection") + self.stop() + + # Stop Video Server + def stop(self): + self._disableStream() + if (self.mode == MODE_Output) and (self.filename == ""): + try: + cv2.destroyAllWindows() + except Exception: + pass + self.listener.close() + logging.info("Video server stopped") + + +# Validate IP address +def ip(ip): + try: + _ = ipaddress.ip_address(ip) + return ip + except: + raise argparse.ArgumentTypeError(f"Invalid IP address: {ip}!") + +def parse_arguments(): + formatter = lambda prog: argparse.HelpFormatter(prog, max_help_position=41) + parser = argparse.ArgumentParser(formatter_class=formatter, description="VSI Video Server") + + parser_optional = 
parser.add_argument_group("optional") + parser_optional.add_argument("--ip", dest="ip", metavar="", + help=f"Server IP address (default: {default_address[0]})", + type=ip, default=default_address[0]) + parser_optional.add_argument("--port", dest="port", metavar="", + help=f"TCP port (default: {default_address[1]})", + type=int, default=default_address[1]) + parser_optional.add_argument("--authkey", dest="authkey", metavar="", + help=f"Authorization key (default: {default_authkey})", + type=str, default=default_authkey) + + return parser.parse_args() + +if __name__ == '__main__': + args = parse_arguments() + Server = VideoServer((args.ip, args.port), args.authkey) + try: + Server.run() + except KeyboardInterrupt: + Server.stop() diff --git a/cmsis-pack-examples/device/corstone/vsi/video/source/video_drv.c b/cmsis-pack-examples/device/corstone/vsi/video/source/video_drv.c new file mode 100644 index 0000000..f66aa00 --- /dev/null +++ b/cmsis-pack-examples/device/corstone/vsi/video/source/video_drv.c @@ -0,0 +1,601 @@ +/* + * Copyright (c) 2023-2024 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <stddef.h> +#include <string.h> +#include "video_drv.h" +#ifdef _RTE_ +#include "RTE_Components.h" +#endif +#include CMSIS_device_header +#include "arm_vsi.h" + +// Video channel definitions +#ifndef VIDEO_INPUT_CHANNELS +#define VIDEO_INPUT_CHANNELS 1 +#endif +#if (VIDEO_INPUT_CHANNELS > 2) +#error "Maximum 2 Video Input channels are supported!" +#endif +#ifndef VIDEO_OUTPUT_CHANNELS +#define VIDEO_OUTPUT_CHANNELS 1 +#endif +#if (VIDEO_OUTPUT_CHANNELS > 2) +#error "Maximum 2 Video Output channels are supported!" +#endif + +// Video peripheral definitions +#define VideoI0 ARM_VSI4 // Video Input channel 0 access struct +#define VideoI0_IRQn ARM_VSI4_IRQn // Video Input channel 0 Interrupt number +#define VideoI0_Handler ARM_VSI4_Handler // Video Input channel 0 Interrupt handler +#define VideoI1 ARM_VSI5 // Video Input channel 1 access struct +#define VideoI1_IRQn ARM_VSI5_IRQn // Video Input channel 1 Interrupt number +#define VideoI1_Handler ARM_VSI5_Handler // Video Input channel 1 Interrupt handler +#define VideoO0 ARM_VSI6 // Video Output channel 0 access struct +#define VideoO0_IRQn ARM_VSI6_IRQn // Video Output channel 0 Interrupt number +#define VideoO0_Handler ARM_VSI6_Handler // Video Output channel 0 Interrupt handler +#define VideoO1 ARM_VSI7 // Video Output channel 1 access struct +#define VideoO1_IRQn ARM_VSI7_IRQn // Video Output channel 1 Interrupt number +#define VideoO1_Handler ARM_VSI7_Handler // Video Output channel 1 Interrupt handler + +// Video Peripheral registers +#define Reg_MODE Regs[0] // Mode: 0=Input, 1=Output +#define Reg_CONTROL Regs[1] // Control: enable, continuous, flush +#define Reg_STATUS Regs[2] // Status: active, buf_empty, buf_full, overflow, underflow, eos +#define Reg_FILENAME_LEN Regs[3] // Filename length +#define Reg_FILENAME_CHAR Regs[4] // Filename character +#define Reg_FILENAME_VALID Regs[5] // Filename valid flag +#define Reg_FRAME_WIDTH Regs[6] // Requested frame width +#define Reg_FRAME_HEIGHT Regs[7] // Requested frame 
height +#define Reg_COLOR_FORMAT Regs[8] // Color format +#define Reg_FRAME_RATE Regs[9] // Frame rate +#define Reg_FRAME_INDEX Regs[10] // Frame index +#define Reg_FRAME_COUNT Regs[11] // Frame count +#define Reg_FRAME_COUNT_MAX Regs[12] // Frame count maximum + +// Video MODE register defintions +#define Reg_MODE_IO_Pos 0U +#define Reg_MODE_IO_Msk (1UL << Reg_MODE_IO_Pos) +#define Reg_MODE_Input (0UL << Reg_MODE_IO_Pos) +#define Reg_MODE_Output (1UL << Reg_MODE_IO_Pos) + +// Video CONTROL register definitions +#define Reg_CONTROL_ENABLE_Pos 0U +#define Reg_CONTROL_ENABLE_Msk (1UL << Reg_CONTROL_ENABLE_Pos) +#define Reg_CONTROL_CONTINUOS_Pos 1U +#define Reg_CONTROL_CONTINUOS_Msk (1UL << Reg_CONTROL_CONTINUOS_Pos) +#define Reg_CONTROL_BUF_FLUSH_Pos 2U +#define Reg_CONTROL_BUF_FLUSH_Msk (1UL << Reg_CONTROL_BUF_FLUSH_Pos) + +// Video STATUS register definitions +#define Reg_STATUS_ACTIVE_Pos 0U +#define Reg_STATUS_ACTIVE_Msk (1UL << Reg_STATUS_ACTIVE_Pos) +#define Reg_STATUS_BUF_EMPTY_Pos 1U +#define Reg_STATUS_BUF_EMPTY_Msk (1UL << Reg_STATUS_BUF_EMPTY_Pos) +#define Reg_STATUS_BUF_FULL_Pos 2U +#define Reg_STATUS_BUF_FULL_Msk (1UL << Reg_STATUS_BUF_FULL_Pos) +#define Reg_STATUS_OVERFLOW_Pos 3U +#define Reg_STATUS_OVERFLOW_Msk (1UL << Reg_STATUS_OVERFLOW_Pos) +#define Reg_STATUS_UNDERFLOW_Pos 4U +#define Reg_STATUS_UNDERFLOW_Msk (1UL << Reg_STATUS_UNDERFLOW_Pos) +#define Reg_STATUS_EOS_Pos 5U +#define Reg_STATUS_EOS_Msk (1UL << Reg_STATUS_EOS_Pos) + +// IRQ Status register definitions +#define Reg_IRQ_Status_FRAME_Pos 0U +#define Reg_IRQ_Status_FRAME_Msk (1UL << Reg_IRQ_Status_FRAME_Pos) +#define Reg_IRQ_Status_OVERFLOW_Pos 1U +#define Reg_IRQ_Status_OVERFLOW_Msk (1UL << Reg_IRQ_Status_OVERFLOW_Pos) +#define Reg_IRQ_Status_UNDERFLOW_Pos 2U +#define Reg_IRQ_Status_UNDERFLOW_Msk (1UL << Reg_IRQ_Status_UNDERFLOW_Pos) +#define Reg_IRQ_Status_EOS_Pos 3U +#define Reg_IRQ_Status_EOS_Msk (1UL << Reg_IRQ_Status_EOS_Pos) + +#define Reg_IRQ_Status_Msk Reg_IRQ_Status_FRAME_Msk | 
\ + Reg_IRQ_Status_OVERFLOW_Msk | \ + Reg_IRQ_Status_UNDERFLOW_Msk | \ + Reg_IRQ_Status_EOS_Msk + +// Video peripheral access structure +static ARM_VSI_Type * const pVideo[4] = { VideoI0, VideoO0, VideoI1, VideoO1 }; + +// Driver State +static uint8_t Initialized = 0U; +static uint8_t Configured[4] = { 0U, 0U, 0U, 0U }; + +// Event Callback +static VideoDrv_Event_t CB_Event = NULL; + +// Video Interrupt Handler +static void Video_Handler (uint32_t channel) { + uint32_t irq_status; + uint32_t event; + + irq_status = pVideo[channel]->IRQ.Status; + pVideo[channel]->IRQ.Clear = irq_status; + __DSB(); + __ISB(); + + event = 0U; + if (irq_status & Reg_IRQ_Status_FRAME_Msk) { + event |= VIDEO_DRV_EVENT_FRAME; + } + if (irq_status & Reg_IRQ_Status_OVERFLOW_Msk) { + event |= VIDEO_DRV_EVENT_OVERFLOW; + } + if (irq_status & Reg_IRQ_Status_UNDERFLOW_Msk) { + event |= VIDEO_DRV_EVENT_UNDERFLOW; + } + if (irq_status & Reg_IRQ_Status_EOS_Msk) { + event |= VIDEO_DRV_EVENT_EOS; + } + + if (CB_Event != NULL) { + CB_Event(channel, event); + } +} + +// Video channel 0 Interrupt Handler +#if (VIDEO_INPUT_CHANNELS >= 1) +void VideoI0_Handler (void); +void VideoI0_Handler (void) { + Video_Handler(0U); +} +#endif + +// Video channel 1 Interrupt Handler +#if (VIDEO_OUTPUT_CHANNELS >= 1) +void VideoO0_Handler (void); +void VideoO0_Handler (void) { + Video_Handler(1U); +} +#endif + +// Video channel 2 Interrupt Handler +#if (VIDEO_INPUT_CHANNELS >= 2) +void VideoI1_Handler (void); +void VideoI1_Handler (void) { + Video_Handler(2U); +} +#endif + +// Video channel 3 Interrupt Handler +#if (VIDEO_OUTPUT_CHANNELS >= 2) +void VideoO1_Handler (void); +void VideoO1_Handler (void) { + Video_Handler(3U); +} +#endif + +// Initialize Video Interface +int32_t VideoDrv_Initialize (VideoDrv_Event_t cb_event) { + + CB_Event = cb_event; + + // Initialize Video Input channel 0 + #if (VIDEO_INPUT_CHANNELS >= 1) + VideoI0->Timer.Control = 0U; + VideoI0->DMA.Control = 0U; + VideoI0->IRQ.Clear = 
Reg_IRQ_Status_Msk; + VideoI0->IRQ.Enable = Reg_IRQ_Status_Msk; + VideoI0->Reg_MODE = Reg_MODE_Input; + VideoI0->Reg_CONTROL = 0U; +// NVIC_EnableIRQ(VideoI0_IRQn); + NVIC->ISER[(((uint32_t)VideoI0_IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)VideoI0_IRQn) & 0x1FUL)); + #endif + Configured[0] = 0U; + + // Initialize Video Output channel 0 + #if (VIDEO_OUTPUT_CHANNELS >= 1) + VideoO0->Timer.Control = 0U; + VideoO0->DMA.Control = 0U; + VideoO0->IRQ.Clear = Reg_IRQ_Status_Msk; + VideoO0->IRQ.Enable = Reg_IRQ_Status_Msk; + VideoO0->Reg_MODE = Reg_MODE_Output; + VideoO0->Reg_CONTROL = 0U; +// NVIC_EnableIRQ(VideoO0_IRQn); + NVIC->ISER[(((uint32_t)VideoO0_IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)VideoO0_IRQn) & 0x1FUL)); + #endif + Configured[1] = 0U; + + // Initialize Video Input channel 1 + #if (VIDEO_INPUT_CHANNELS >= 2) + VideoI1->Timer.Control = 0U; + VideoI1->DMA.Control = 0U; + VideoI1->IRQ.Clear = Reg_IRQ_Status_Msk; + VideoI1->IRQ.Enable = Reg_IRQ_Status_Msk; + VideoI1->Reg_MODE = Reg_MODE_Input; + VideoI1->Reg_CONTROL = 0U; +// NVIC_EnableIRQ(VideoI1_IRQn); + NVIC->ISER[(((uint32_t)VideoI1_IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)VideoI1_IRQn) & 0x1FUL)); + #endif + Configured[2] = 0U; + + // Initialize Video Output channel 1 + #if (VIDEO_OUTPUT_CHANNELS >= 2) + VideoO1->Timer.Control = 0U; + VideoO1->DMA.Control = 0U; + VideoO1->IRQ.Clear = Reg_IRQ_Status_Msk; + VideoO1->IRQ.Enable = Reg_IRQ_Status_Msk; + VideoO1->Reg_MODE = Reg_MODE_Output; + VideoO1->Reg_CONTROL = 0U; +// NVIC_EnableIRQ(VideoO1_IRQn); + NVIC->ISER[(((uint32_t)VideoO1_IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)VideoO1_IRQn) & 0x1FUL)); + #endif + Configured[3] = 0U; + + __DSB(); + __ISB(); + + Initialized = 1U; + + return VIDEO_DRV_OK; +} + +// De-initialize Video Interface +int32_t VideoDrv_Uninitialize (void) { + + // De-initialize Video Input channel 0 + #if (VIDEO_INPUT_CHANNELS >= 1) +// NVIC_DisableIRQ(VideoI0_IRQn); + NVIC->ICER[(((uint32_t)VideoI0_IRQn) >> 5UL)] = 
(uint32_t)(1UL << (((uint32_t)VideoI0_IRQn) & 0x1FUL)); + VideoI0->Timer.Control = 0U; + VideoI0->DMA.Control = 0U; + VideoI0->IRQ.Clear = Reg_IRQ_Status_Msk; + VideoI0->IRQ.Enable = 0U; + VideoI0->Reg_CONTROL = 0U; + #endif + + // De-initialize Video Output channel 0 + #if (VIDEO_OUTPUT_CHANNELS >= 1) +// NVIC_DisableIRQ(VideoO0_IRQn); + NVIC->ICER[(((uint32_t)VideoO0_IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)VideoO0_IRQn) & 0x1FUL)); + VideoO0->Timer.Control = 0U; + VideoO0->DMA.Control = 0U; + VideoO0->IRQ.Clear = Reg_IRQ_Status_Msk; + VideoO0->IRQ.Enable = 0U; + VideoO0->Reg_CONTROL = 0U; + #endif + + // De-initialize Video Input channel 1 + #if (VIDEO_INPUT_CHANNELS >= 2) +// NVIC_DisableIRQ(VideoI1_IRQn); + NVIC->ICER[(((uint32_t)VideoI1_IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)VideoI1_IRQn) & 0x1FUL)); + VideoI1->Timer.Control = 0U; + VideoI1->DMA.Control = 0U; + VideoI1->IRQ.Clear = Reg_IRQ_Status_Msk; + VideoI1->IRQ.Enable = 0U; + VideoI1->Reg_CONTROL = 0U; + #endif + + // De-initialize Video Output channel 1 + #if (VIDEO_OUTPUT_CHANNELS >= 2) +// NVIC_DisableIRQ(VideoO1_IRQn); + NVIC->ICER[(((uint32_t)VideoO1_IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)VideoO1_IRQn) & 0x1FUL)); + VideoO1->Timer.Control = 0U; + VideoO1->DMA.Control = 0U; + VideoO1->IRQ.Clear = Reg_IRQ_Status_Msk; + VideoO1->IRQ.Enable = 0U; + VideoO1->Reg_CONTROL = 0U; + #endif + + __DSB(); + __ISB(); + + Initialized = 0U; + + return VIDEO_DRV_OK; +} + +// Set Video Interface file +int32_t VideoDrv_SetFile (uint32_t channel, const char *name) { + const char *p; + uint32_t n; + + if ((((channel & 1U) == 0U) && ((channel >> 1) >= VIDEO_INPUT_CHANNELS)) || + (((channel & 1U) != 0U) && ((channel >> 1) >= VIDEO_OUTPUT_CHANNELS)) || + (name == NULL)) { + return VIDEO_DRV_ERROR_PARAMETER; + } + + if (Initialized == 0U) { + return VIDEO_DRV_ERROR; + } + + if ((pVideo[channel]->Reg_STATUS & Reg_STATUS_ACTIVE_Msk) != 0U) { + return VIDEO_DRV_ERROR; + } + + // Register Video filename + 
n = strlen(name); + pVideo[channel]->Reg_FILENAME_LEN = n; + for (p = name; n != 0U; n--) { + pVideo[channel]->Reg_FILENAME_CHAR = *p++; + } + if (pVideo[channel]->Reg_FILENAME_VALID == 0U) { + return VIDEO_DRV_ERROR; + } + + return VIDEO_DRV_OK; +} + +// Configure Video Interface +int32_t VideoDrv_Configure (uint32_t channel, uint32_t frame_width, uint32_t frame_height, uint32_t color_format, uint32_t frame_rate) { + uint32_t pixel_size; + uint32_t block_size; + + if ((((channel & 1U) == 0U) && ((channel >> 1) >= VIDEO_INPUT_CHANNELS)) || + (((channel & 1U) != 0U) && ((channel >> 1) >= VIDEO_OUTPUT_CHANNELS)) || + (frame_width == 0U) || + (frame_height == 0U) || + (frame_rate == 0U) || + (color_format <= VIDEO_DRV_COLOR_FORMAT_BEGIN) || + (color_format >= VIDEO_DRV_COLOR_FORMAT_END)) { + return VIDEO_DRV_ERROR_PARAMETER; + } + + switch (color_format) { + case VIDEO_DRV_COLOR_GRAYSCALE8: + pixel_size = 8U; + break; + case VIDEO_DRV_COLOR_YUV420: + pixel_size = 12U; + break; + case VIDEO_DRV_COLOR_BGR565: + pixel_size = 16U; + break; + case VIDEO_DRV_COLOR_RGB888: + case VIDEO_DRV_COLOR_NV12: + case VIDEO_DRV_COLOR_NV21: + pixel_size = 24U; + break; + default: + return VIDEO_DRV_ERROR_PARAMETER; + } + + block_size = (((frame_width * frame_height) * pixel_size) + 7U) / 8U; + block_size = (block_size + 3U) & ~3U; + + if (Initialized == 0U) { + return VIDEO_DRV_ERROR; + } + + if ((pVideo[channel]->Reg_STATUS & Reg_STATUS_ACTIVE_Msk) != 0U) { + return VIDEO_DRV_ERROR; + } + + pVideo[channel]->Reg_FRAME_WIDTH = frame_width; + pVideo[channel]->Reg_FRAME_HEIGHT = frame_height; + pVideo[channel]->Reg_COLOR_FORMAT = color_format; + pVideo[channel]->Reg_FRAME_RATE = frame_rate; + pVideo[channel]->Timer.Interval = 1000000U / frame_rate; + pVideo[channel]->DMA.BlockSize = block_size; + + Configured[channel] = 1U; + + return VIDEO_DRV_OK; +} + +// Set Video Interface buffer +int32_t VideoDrv_SetBuf (uint32_t channel, void *buf, uint32_t buf_size) { + uint32_t block_num; + + if 
((((channel & 1U) == 0U) && ((channel >> 1) >= VIDEO_INPUT_CHANNELS)) || + (((channel & 1U) != 0U) && ((channel >> 1) >= VIDEO_OUTPUT_CHANNELS)) || + (buf == NULL) || + (buf_size == 0U)) { + return VIDEO_DRV_ERROR_PARAMETER; + } + + if ((Initialized == 0U) || + (Configured[channel] == 0U)) { + return VIDEO_DRV_ERROR; + } + + if ((pVideo[channel]->Reg_STATUS & Reg_STATUS_ACTIVE_Msk) != 0U) { + return VIDEO_DRV_ERROR; + } + + block_num = buf_size / pVideo[channel]->DMA.BlockSize; + if (block_num == 0U) { + return VIDEO_DRV_ERROR; + } + + pVideo[channel]->Reg_FRAME_COUNT_MAX = block_num; + pVideo[channel]->DMA.BlockNum = block_num; + + pVideo[channel]->DMA.Address = (uint32_t)buf; + + Configured[channel] = 2U; + + return VIDEO_DRV_OK; +} + +// Flush Video Interface buffer +int32_t VideoDrv_FlushBuf (uint32_t channel) { + + if ((((channel & 1U) == 0U) && ((channel >> 1) >= VIDEO_INPUT_CHANNELS)) || + (((channel & 1U) != 0U) && ((channel >> 1) >= VIDEO_OUTPUT_CHANNELS))) { + return VIDEO_DRV_ERROR_PARAMETER; + } + + if (Initialized == 0U) { + return VIDEO_DRV_ERROR; + } + + if ((pVideo[channel]->Reg_STATUS & Reg_STATUS_ACTIVE_Msk) != 0U) { + return VIDEO_DRV_ERROR; + } + + pVideo[channel]->Reg_CONTROL = Reg_CONTROL_BUF_FLUSH_Msk; + + return VIDEO_DRV_OK; +} + +// Start Stream on Video Interface +int32_t VideoDrv_StreamStart (uint32_t channel, uint32_t mode) { + uint32_t control; + + if ((((channel & 1U) == 0U) && ((channel >> 1) >= VIDEO_INPUT_CHANNELS)) || + (((channel & 1U) != 0U) && ((channel >> 1) >= VIDEO_OUTPUT_CHANNELS)) || + (mode > VIDEO_DRV_MODE_CONTINUOS)) { + return VIDEO_DRV_ERROR_PARAMETER; + } + + if ((Initialized == 0U) || + (Configured[channel] < 2U)) { + return VIDEO_DRV_ERROR; + } + + if ((pVideo[channel]->Reg_STATUS & Reg_STATUS_ACTIVE_Msk) != 0U) { + return VIDEO_DRV_OK; + } + + control = Reg_CONTROL_ENABLE_Msk; + if (mode == VIDEO_DRV_MODE_CONTINUOS) { + control |= Reg_CONTROL_CONTINUOS_Msk; + } + pVideo[channel]->Reg_CONTROL = control; + + if 
((pVideo[channel]->Reg_STATUS & Reg_STATUS_ACTIVE_Msk) == 0U) { + return VIDEO_DRV_ERROR; + } + + control = ARM_VSI_DMA_Enable_Msk; + if ((channel & 1U) == 0U) { + control |= ARM_VSI_DMA_Direction_P2M; + } else { + control |= ARM_VSI_DMA_Direction_M2P; + } + pVideo[channel]->DMA.Control = control; + + control = ARM_VSI_Timer_Run_Msk | + ARM_VSI_Timer_Trig_DMA_Msk | + ARM_VSI_Timer_Trig_IRQ_Msk; + if (mode == VIDEO_DRV_MODE_CONTINUOS) { + control |= ARM_VSI_Timer_Periodic_Msk; + } + pVideo[channel]->Timer.Control = control; + + return VIDEO_DRV_OK; +} + +// Stop Stream on Video Interface +int32_t VideoDrv_StreamStop (uint32_t channel) { + + if ((((channel & 1U) == 0U) && ((channel >> 1) >= VIDEO_INPUT_CHANNELS)) || + (((channel & 1U) != 0U) && ((channel >> 1) >= VIDEO_OUTPUT_CHANNELS))) { + return VIDEO_DRV_ERROR_PARAMETER; + } + + if ((Initialized == 0U) || + (Configured[channel] < 2U)) { + return VIDEO_DRV_ERROR; + } + + if ((pVideo[channel]->Reg_STATUS & Reg_STATUS_ACTIVE_Msk) == 0U) { + return VIDEO_DRV_OK; + } + + pVideo[channel]->Timer.Control = 0U; + pVideo[channel]->DMA.Control = 0U; + pVideo[channel]->Reg_CONTROL = 0U; + + return VIDEO_DRV_OK; +} + +// Get Video Frame buffer +void *VideoDrv_GetFrameBuf (uint32_t channel) { + void *frame = NULL; + + if ((((channel & 1U) == 0U) && ((channel >> 1) >= VIDEO_INPUT_CHANNELS)) || + (((channel & 1U) != 0U) && ((channel >> 1) >= VIDEO_OUTPUT_CHANNELS))) { + return NULL; + } + + if ((Initialized == 0U) || + (Configured[channel] < 2U)) { + return NULL; + } + + if ((pVideo[channel]->Reg_MODE & Reg_MODE_IO_Msk) == Reg_MODE_Input) { + // Input + if ((pVideo[channel]->Reg_STATUS & Reg_STATUS_BUF_EMPTY_Msk) != 0U) { + return NULL; + } + } else { + // Output + if ((pVideo[channel]->Reg_STATUS & Reg_STATUS_BUF_FULL_Msk) != 0U) { + return NULL; + } + } + + frame = (void *)(pVideo[channel]->DMA.Address + (pVideo[channel]->Reg_FRAME_INDEX * pVideo[channel]->DMA.BlockSize)); + + return frame; +} + +// Release Video Frame 
+int32_t VideoDrv_ReleaseFrame (uint32_t channel) { + + if ((((channel & 1U) == 0U) && ((channel >> 1) >= VIDEO_INPUT_CHANNELS)) || + (((channel & 1U) != 0U) && ((channel >> 1) >= VIDEO_OUTPUT_CHANNELS))) { + return VIDEO_DRV_ERROR_PARAMETER; + } + + if ((Initialized == 0U) || + (Configured[channel] < 2U)) { + return VIDEO_DRV_ERROR; + } + + if ((pVideo[channel]->Reg_MODE & Reg_MODE_IO_Msk) == Reg_MODE_Input) { + // Input + if ((pVideo[channel]->Reg_STATUS & Reg_STATUS_BUF_EMPTY_Msk) != 0U) { + return VIDEO_DRV_ERROR; + } + } else { + // Output + if ((pVideo[channel]->Reg_STATUS & Reg_STATUS_BUF_FULL_Msk) != 0U) { + return VIDEO_DRV_ERROR; + } + } + + pVideo[channel]->Reg_FRAME_INDEX = 0U; + + return VIDEO_DRV_OK; +} + + +// Get Video Interface status +VideoDrv_Status_t VideoDrv_GetStatus (uint32_t channel) { + VideoDrv_Status_t status = { 0U, 0U, 0U, 0U, 0U, 0U, 0U }; + uint32_t status_reg; + + if ((((channel & 1U) == 0U) && ((channel >> 1) < VIDEO_INPUT_CHANNELS)) || + (((channel & 1U) != 0U) && ((channel >> 1) < VIDEO_OUTPUT_CHANNELS))) { + status_reg = pVideo[channel]->Reg_STATUS; + status.active = (status_reg & Reg_STATUS_ACTIVE_Msk) >> Reg_STATUS_ACTIVE_Pos; + status.buf_empty = (status_reg & Reg_STATUS_BUF_EMPTY_Msk) >> Reg_STATUS_BUF_EMPTY_Pos; + status.buf_full = (status_reg & Reg_STATUS_BUF_FULL_Msk) >> Reg_STATUS_BUF_FULL_Pos; + status.overflow = (status_reg & Reg_STATUS_OVERFLOW_Msk) >> Reg_STATUS_OVERFLOW_Pos; + status.underflow = (status_reg & Reg_STATUS_UNDERFLOW_Msk) >> Reg_STATUS_UNDERFLOW_Pos; + status.eos = (status_reg & Reg_STATUS_EOS_Msk) >> Reg_STATUS_EOS_Pos; + } + + return status; +} diff --git a/cmsis-pack-examples/device/corstone/vsi/vsi.clayer.yml b/cmsis-pack-examples/device/corstone/vsi/vsi.clayer.yml new file mode 100644 index 0000000..b3289a0 --- /dev/null +++ b/cmsis-pack-examples/device/corstone/vsi/vsi.clayer.yml @@ -0,0 +1,28 @@ +# SPDX-FileCopyrightText: Copyright 2022-2024 Arm Limited and/or its +# affiliates +# 
SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# yaml-language-server: $schema=https://raw.githubusercontent.com/Open-CMSIS-Pack/devtools/schemas/projmgr/2.4.0/tools/projmgr/schemas/clayer.schema.json + +layer: + + description: VSI video components and files. + + groups: + - group: VSI + files: + - file: ./include/arm_vsi.h + - file: ./video/include/video_drv.h + - file: ./video/source/video_drv.c \ No newline at end of file diff --git a/cmsis-pack-examples/kws/src/main_live.cpp b/cmsis-pack-examples/kws/src/main_live.cpp index 98a3bad..55fbe6e 100644 --- a/cmsis-pack-examples/kws/src/main_live.cpp +++ b/cmsis-pack-examples/kws/src/main_live.cpp @@ -20,7 +20,7 @@ * CMSIS pack produced by ml-embedded-eval-kit. The pack consists * of platform agnostic end-to-end ML use case API's that can be * used to construct ML examples for any target that can support - * the memory requirements for TensorFlow-Lite-Micro framework and + * the memory requirements for TensorFlow Lite Micro framework and * some heap for the API runtime. */ #include "AudioUtils.hpp" /* Generic audio utilities like sliding windows. */ diff --git a/cmsis-pack-examples/kws/src/main_wav.cpp b/cmsis-pack-examples/kws/src/main_wav.cpp index f12f72e..da20b73 100644 --- a/cmsis-pack-examples/kws/src/main_wav.cpp +++ b/cmsis-pack-examples/kws/src/main_wav.cpp @@ -21,7 +21,7 @@ * CMSIS pack produced by ml-embedded-eval-kit. 
The pack consists * of platform agnostic end-to-end ML use case API's that can be * used to construct ML examples for any target that can support - * the memory requirements for TensorFlow-Lite-Micro framework and + * the memory requirements for TensorFlow Lite Micro framework and * some heap for the API runtime. */ #include "AudioUtils.hpp" diff --git a/cmsis-pack-examples/mlek.csolution.yml b/cmsis-pack-examples/mlek.csolution.yml index 94464a8..199f6a3 100644 --- a/cmsis-pack-examples/mlek.csolution.yml +++ b/cmsis-pack-examples/mlek.csolution.yml @@ -33,8 +33,8 @@ solution: - pack: tensorflow::ruy@1.23.2 - pack: tensorflow::tensorflow-lite-micro@1.23.2 - - pack: ARM::V2M_MPS3_SSE_300_BSP@1.4.0 - - pack: ARM::V2M_MPS3_SSE_310_BSP@1.3.0 + - pack: ARM::V2M_MPS3_SSE_300_BSP@1.5.0 + - pack: ARM::V2M_MPS3_SSE_310_BSP@1.4.0 - pack: NXP::FRDM-K64F_BSP@14.0.0 - pack: NXP::MK64F12_DFP@14.0.0 - pack: Keil::STM32F7xx_DFP@2.15.1 @@ -55,12 +55,15 @@ solution: - type: AVH-SSE-300 board: ARM::V2M-MPS3-SSE-300-FVP device: ARM::SSE-300-MPS3 + define: + - CORSTONE300_FVP - type: AVH-SSE-300-U55 board: ARM::V2M-MPS3-SSE-300-FVP device: ARM::SSE-300-MPS3 define: - ETHOSU55 + - CORSTONE300_FVP - type: AVH-SSE-310 board: ARM::V2M-MPS3-SSE-310 @@ -101,6 +104,7 @@ solution: device: ARM::SSE-300-MPS3 define: - ETHOSU65 + - CORSTONE300_FVP - type: AVH-SSE-310-U65 board: ARM::V2M-MPS3-SSE-310 @@ -109,6 +113,7 @@ solution: - ETHOSU65 projects: + # Object detection use case - project: ./object-detection/object-detection.cproject.yml for-context: - +AVH-SSE-300-U65 @@ -118,6 +123,18 @@ solution: - +AVH-SSE-300 - +AVH-SSE-310 - +Alif-E7-M55-HP + + # Object detection with Virtual Streaming Interface (VSI) support + - project: ./object-detection/object-detection-vsi.cproject.yml + for-context: + - +AVH-SSE-300-U65 + - +AVH-SSE-310-U65 + - +AVH-SSE-300-U55 + - +AVH-SSE-310-U55 + - +AVH-SSE-300 + - +AVH-SSE-310 + + # Keyword Spotting (KWS) use case - project: ./kws/kws.cproject.yml for-context: - 
+AVH-SSE-300-U65 diff --git a/cmsis-pack-examples/object-detection/object-detection-vsi.cproject.yml b/cmsis-pack-examples/object-detection/object-detection-vsi.cproject.yml new file mode 100644 index 0000000..30d09fd --- /dev/null +++ b/cmsis-pack-examples/object-detection/object-detection-vsi.cproject.yml @@ -0,0 +1,41 @@ +# SPDX-FileCopyrightText: Copyright 2022-2024 Arm Limited and/or its +# affiliates +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# yaml-language-server: $schema=https://raw.githubusercontent.com/Open-CMSIS-Pack/devtools/schemas/projmgr/2.4.0/tools/projmgr/schemas/cproject.schema.json + +project: + output: + type: + - elf + - bin + + groups: + - group: MainVSI + files: + - file: src/main_vsi.cpp + + layers: + - layer: ../common/common.clayer.yml + - layer: ../device/corstone/vsi/vsi.clayer.yml + - layer: ../device/corstone/corstone-device.clayer.yml + for-context: + - +AVH-SSE-300 + - +AVH-SSE-300-U55 + - +AVH-SSE-300-U65 + - +AVH-SSE-310 + - +AVH-SSE-310-U55 + - +AVH-SSE-310-U65 + - layer: ./object-detection.clayer.yml diff --git a/cmsis-pack-examples/object-detection/object-detection.clayer.yml b/cmsis-pack-examples/object-detection/object-detection.clayer.yml new file mode 100644 index 0000000..b7673e0 --- /dev/null +++ b/cmsis-pack-examples/object-detection/object-detection.clayer.yml @@ -0,0 +1,58 @@ +# SPDX-FileCopyrightText: Copyright 2022-2024 Arm Limited and/or its +# affiliates +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# yaml-language-server: $schema=https://raw.githubusercontent.com/Open-CMSIS-Pack/devtools/schemas/projmgr/2.4.0/tools/projmgr/schemas/clayer.schema.json + +layer: + description: Common layer for object detection project + + groups: + - group: Use Case + files: + - file: src/yolo-fastest_192_face_v4.tflite.cpp + for-context: + - +AVH-SSE-300 + - +AVH-SSE-310 + - file: src/yolo-fastest_192_face_v4_vela_H256.tflite.cpp + for-context: + - +AVH-SSE-300-U55 + - +AVH-SSE-310-U55 + - +Alif-E7-M55-HP + - file: src/yolo-fastest_192_face_v4_vela_Y256.tflite.cpp + for-context: + - +AVH-SSE-300-U65 + - +AVH-SSE-310-U65 + + - group: Device Files + files: + - file: linker/mps3-sse-300.sct + for-context: + - +AVH-SSE-300 + - +AVH-SSE-300-U55 + - +AVH-SSE-300-U65 + - file: linker/mps3-sse-310.sct + for-context: + - +AVH-SSE-310 + - +AVH-SSE-310-U55 + - +AVH-SSE-310-U65 + - file: linker/alif-e7-m55-hp.sct + for-context: +Alif-E7-M55-HP + + define: + - ACTIVATION_BUF_SZ: 532480 + + components: + - component: ARM::ML Eval Kit:Vision:Object detection diff --git a/cmsis-pack-examples/object-detection/object-detection.cproject.yml b/cmsis-pack-examples/object-detection/object-detection.cproject.yml index 7b4830b..fb27861 100644 --- a/cmsis-pack-examples/object-detection/object-detection.cproject.yml +++ b/cmsis-pack-examples/object-detection/object-detection.cproject.yml @@ -23,54 +23,25 @@ project: - bin groups: - - group: Use Case + - group: MainStatic + for-context: + - +AVH-SSE-300 + - +AVH-SSE-300-U55 + - +AVH-SSE-300-U65 + - +AVH-SSE-310 + - +AVH-SSE-310-U55 + - +AVH-SSE-310-U65 files: - file: include/InputFiles.hpp - file: src/InputFiles.cpp - file: src/sample_image.cpp - - file: src/yolo-fastest_192_face_v4.tflite.cpp - for-context: - - +AVH-SSE-300 - - +AVH-SSE-310 - - file: src/yolo-fastest_192_face_v4_vela_H256.tflite.cpp - for-context: - - +AVH-SSE-300-U55 - - +AVH-SSE-310-U55 - - +Alif-E7-M55-HP - - file: src/yolo-fastest_192_face_v4_vela_Y256.tflite.cpp - for-context: 
- - +AVH-SSE-300-U65 - - +AVH-SSE-310-U65 - file: src/main_static.cpp - for-context: - - +AVH-SSE-300 - - +AVH-SSE-300-U55 - - +AVH-SSE-300-U65 - - +AVH-SSE-310 - - +AVH-SSE-310-U55 - - +AVH-SSE-310-U65 - - file: src/main_live.cpp - for-context: - - +Alif-E7-M55-HP - - group: Device Files + - group: MainLive + for-context: + - +Alif-E7-M55-HP files: - - file: linker/mps3-sse-300.sct - for-context: - - +AVH-SSE-300 - - +AVH-SSE-300-U55 - - +AVH-SSE-300-U65 - - file: linker/mps3-sse-310.sct - for-context: - - +AVH-SSE-310 - - +AVH-SSE-310-U55 - - +AVH-SSE-310-U65 - - file: linker/alif-e7-m55-hp.sct - for-context: +Alif-E7-M55-HP - - - define: - - ACTIVATION_BUF_SZ: 532480 + - file: src/main_live.cpp layers: - layer: ../common/common.clayer.yml @@ -88,5 +59,4 @@ project: for-context: - +Alif-E7-M55-HP - components: - - component: ARM::ML Eval Kit:Vision:Object detection + - layer: ./object-detection.clayer.yml diff --git a/cmsis-pack-examples/object-detection/src/main_live.cpp b/cmsis-pack-examples/object-detection/src/main_live.cpp index acbedd3..d89b5de 100644 --- a/cmsis-pack-examples/object-detection/src/main_live.cpp +++ b/cmsis-pack-examples/object-detection/src/main_live.cpp @@ -21,7 +21,7 @@ * CMSIS pack produced by ml-embedded-eval-kit. The pack consists * of platform agnostic end-to-end ML use case API's that can be * used to construct ML examples for any target that can support - * the memory requirements for TensorFlow-Lite-Micro framework and + * the memory requirements for TensorFlow Lite Micro framework and * some heap for the API runtime. 
*/ #include "BufAttributes.hpp" /* Buffer attributes to be applied */ diff --git a/cmsis-pack-examples/object-detection/src/main_static.cpp b/cmsis-pack-examples/object-detection/src/main_static.cpp index 4b8517a..32f0ab0 100644 --- a/cmsis-pack-examples/object-detection/src/main_static.cpp +++ b/cmsis-pack-examples/object-detection/src/main_static.cpp @@ -1,5 +1,5 @@ /* - * SPDX-FileCopyrightText: Copyright 2021-2023 Arm Limited and/or its + * SPDX-FileCopyrightText: Copyright 2021-2024 Arm Limited and/or its * affiliates * SPDX-License-Identifier: Apache-2.0 * @@ -21,7 +21,7 @@ * CMSIS pack produced by ml-embedded-eval-kit. The pack consists * of platform agnostic end-to-end ML use case API's that can be * used to construct ML examples for any target that can support - * the memory requirements for TensorFlow-Lite-Micro framework and + * the memory requirements for TensorFlow Lite Micro framework and * some heap for the API runtime. */ #include "BufAttributes.hpp" /* Buffer attributes to be applied */ diff --git a/cmsis-pack-examples/object-detection/src/main_vsi.cpp b/cmsis-pack-examples/object-detection/src/main_vsi.cpp new file mode 100644 index 0000000..0f68944 --- /dev/null +++ b/cmsis-pack-examples/object-detection/src/main_vsi.cpp @@ -0,0 +1,313 @@ +/* + * SPDX-FileCopyrightText: Copyright 2021-2024 Arm Limited and/or its + * affiliates + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/** + * This object detection example is intended to work with the + * CMSIS pack produced by ml-embedded-eval-kit. The pack consists + * of platform agnostic end-to-end ML use case API's that can be + * used to construct ML examples for any target that can support + * the memory requirements for TensorFlow Lite Micro framework and + * some heap for the API runtime. + */ +#include "BufAttributes.hpp" /* Buffer attributes to be applied */ +#include "Classifier.hpp" /* Classifier for the result */ +#include "DetectionResult.hpp" +#include "DetectorPostProcessing.hpp" /* Post Process */ +#include "DetectorPreProcessing.hpp" /* Pre Process */ +#include "YoloFastestModel.hpp" /* Model API */ +#include "video_drv.h" /* Video Driver API */ + +/* Platform dependent files */ +#include "RTE_Components.h" /* Provides definition for CMSIS_device_header */ +#include CMSIS_device_header /* Gives us IRQ num, base addresses. */ +#include "BoardInit.hpp" /* Board initialisation */ +#include "log_macros.h" /* Logging macros (optional) */ + + +#define IMAGE_WIDTH 192 +#define IMAGE_HEIGHT 192 +#define IMAGE_SIZE (IMAGE_WIDTH * IMAGE_HEIGHT * 3) + +namespace arm { +namespace app { + /* Tensor arena buffer */ + static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE; + + /* Image buffer */ + static uint8_t ImageBuf[IMAGE_SIZE] __attribute__((section("image_buf"), aligned(16))); + static uint8_t ImageOut[IMAGE_SIZE] __attribute__((section("image_buf"), aligned(16))); + + /* Optional getter function for the model pointer and its size. */ + namespace object_detection { + extern uint8_t* GetModelPointer(); + extern size_t GetModelLen(); + } /* namespace object_detection */ +} /* namespace app */ +} /* namespace arm */ + +typedef arm::app::object_detection::DetectionResult OdResults; + +/** + * @brief Draws a boxes in the image using the object detection results vector. + * + * @param[out] image Pointer to the start of the image. + * @param[in] width Image width. 
+ * @param[in] height Image height. + * @param[in] results Vector of object detection results. + */ +static void DrawDetectionBoxes(uint8_t* image, + const uint32_t imageWidth, + const uint32_t imageHeight, + const std::vector& results); + +#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) +__asm(" .global __ARM_use_no_argv\n"); +#endif + +int main() +{ + /* Initialise the UART module to allow printf related functions (if using retarget) */ + BoardInit(); + + /* Model object creation and initialisation. */ + arm::app::YoloFastestModel model; + if (!model.Init(arm::app::tensorArena, + sizeof(arm::app::tensorArena), + arm::app::object_detection::GetModelPointer(), + arm::app::object_detection::GetModelLen())) { + printf_err("Failed to initialise model\n"); + return 1; + } + + auto initialImgIdx = 0; + + TfLiteTensor* inputTensor = model.GetInputTensor(0); + TfLiteTensor* outputTensor0 = model.GetOutputTensor(0); + TfLiteTensor* outputTensor1 = model.GetOutputTensor(1); + + if (!inputTensor->dims) { + printf_err("Invalid input tensor dims\n"); + return 1; + } else if (inputTensor->dims->size < 3) { + printf_err("Input tensor dimension should be >= 3\n"); + return 1; + } + + TfLiteIntArray* inputShape = model.GetInputShape(0); + + const int inputImgCols = inputShape->data[arm::app::YoloFastestModel::ms_inputColsIdx]; + const int inputImgRows = inputShape->data[arm::app::YoloFastestModel::ms_inputRowsIdx]; + + /* Set up pre and post-processing. 
*/ + arm::app::DetectorPreProcess preProcess = + arm::app::DetectorPreProcess(inputTensor, true, model.IsDataSigned()); + + std::vector results; + const arm::app::object_detection::PostProcessParams postProcessParams{ + inputImgRows, + inputImgCols, + arm::app::object_detection::originalImageSize, + arm::app::object_detection::anchor1, + arm::app::object_detection::anchor2}; + arm::app::DetectorPostProcess postProcess = + arm::app::DetectorPostProcess(outputTensor0, outputTensor1, results, postProcessParams); + + const size_t imgSz = inputTensor->bytes < IMAGE_SIZE ? + inputTensor->bytes : IMAGE_SIZE; + + if (sizeof(arm::app::ImageBuf) < imgSz) { + printf_err("Image buffer is insufficient\n"); + return 1; + } + + /* Initialize Video Interface */ + if (VideoDrv_Initialize(NULL) != VIDEO_DRV_OK) { + printf_err("Failed to initialise video driver\n"); + return 1; + } + + /** + * Following section is commented out as we use VSI "camera" input by default. + * These lines can be uncommented to use VSI file interface instead - when using + * AVH in a headless environment or a remote instance. 
+ */ +// if (VideoDrv_SetFile(VIDEO_DRV_IN0, "sample_image.png") != VIDEO_DRV_OK) { +// printf_err("Failed to set filename for video input\n"); +// return 1; +// } + /* Set Output Video file (only when using AVH - default: Display) */ +// if (VideoDrv_SetFile(VIDEO_DRV_OUT0, "output_image.png") != VIDEO_DRV_OK) { +// printf_err("Failed to set filename for video output\n"); +// return 1; +// } + + /* Configure Input Video */ + if (VideoDrv_Configure(VIDEO_DRV_IN0, IMAGE_WIDTH, IMAGE_HEIGHT, VIDEO_DRV_COLOR_RGB888, 24U) != VIDEO_DRV_OK) { + printf_err("Failed to configure video input\n"); + return 1; + } + + /* Configure Output Video */ + if (VideoDrv_Configure(VIDEO_DRV_OUT0, IMAGE_WIDTH, IMAGE_HEIGHT, VIDEO_DRV_COLOR_RGB888, 24U) != VIDEO_DRV_OK) { + printf_err("Failed to configure video output\n"); + return 1; + } + + /* Set Input Video buffer */ + if (VideoDrv_SetBuf(VIDEO_DRV_IN0, arm::app::ImageBuf, IMAGE_SIZE) != VIDEO_DRV_OK) { + printf_err("Failed to set buffer for video input\n"); + return 1; + } + /* Set Output Video buffer */ + if (VideoDrv_SetBuf(VIDEO_DRV_OUT0, arm::app::ImageOut, IMAGE_SIZE) != VIDEO_DRV_OK) { + printf_err("Failed to set buffer for video output\n"); + return 1; + } + + auto dstPtr = static_cast(inputTensor->data.uint8); + + uint32_t imgCount = 0; + void *imgFrame; + void *outFrame; + + while (true) { + VideoDrv_Status_t status; + results.clear(); + + /* Start video capture (single frame) */ + if (VideoDrv_StreamStart(VIDEO_DRV_IN0, VIDEO_DRV_MODE_SINGLE) != VIDEO_DRV_OK) { + printf_err("Failed to start video capture\n"); + return 1; + } + + /* Wait for video input frame */ + do { + status = VideoDrv_GetStatus(VIDEO_DRV_IN0); + } while (status.buf_empty != 0U); + + /* Get input video frame buffer */ + imgFrame = VideoDrv_GetFrameBuf(VIDEO_DRV_IN0); + + /* Run the pre-processing, inference and post-processing. 
*/ + if (!preProcess.DoPreProcess(imgFrame, imgSz)) { + printf_err("Pre-processing failed.\n"); + return 1; + } + + /* Run inference over this image. */ + printf("\rImage %" PRIu32 "; ", ++imgCount); + + if (!model.RunInference()) { + printf_err("Inference failed.\n"); + return 1; + } + + if (!postProcess.DoPostProcess()) { + printf_err("Post-processing failed.\n"); + return 1; + } + + /* Release input frame */ + VideoDrv_ReleaseFrame(VIDEO_DRV_IN0); + + DrawDetectionBoxes((uint8_t *)imgFrame, inputImgCols, inputImgRows, results); + + /* Get output video frame buffer */ + outFrame = VideoDrv_GetFrameBuf(VIDEO_DRV_OUT0); + + /* Copy image frame with detection boxes to output frame buffer */ + memcpy(outFrame, imgFrame, IMAGE_SIZE); + + /* Release output frame */ + VideoDrv_ReleaseFrame(VIDEO_DRV_OUT0); + + /* Start video output (single frame) */ + VideoDrv_StreamStart(VIDEO_DRV_OUT0, VIDEO_DRV_MODE_SINGLE); + + /* Check for end of stream (when using AVH with file as Video input) */ + if (status.eos != 0U) { + while (VideoDrv_GetStatus(VIDEO_DRV_OUT0).buf_empty == 0U); + break; + } + } + + /* De-initialize Video Interface */ + VideoDrv_Uninitialize(); + + return 0; +} + +/** + * @brief Draws a box in the image using the object detection result object. + * + * @param[out] imageData Pointer to the start of the image. + * @param[in] width Image width. + * @param[in] height Image height. + * @param[in] result Object detection result. 
+ */ +static void DrawBox(uint8_t* imageData, + const uint32_t width, + const uint32_t height, + const OdResults& result) +{ + const auto x = result.m_x0; + const auto y = result.m_y0; + const auto w = result.m_w; + const auto h = result.m_h; + + const uint32_t step = width * 3; + uint8_t* const imStart = imageData + (y * step) + (x * 3); + + uint8_t* dst_0 = imStart; + uint8_t* dst_1 = imStart + (h * step); + + for (uint32_t i = 0; i < w; ++i) { + *dst_0 = 255; + *dst_1 = 255; + + dst_0 += 3; + dst_1 += 3; + } + + dst_0 = imStart; + dst_1 = imStart + (w * 3); + + for (uint32_t j = 0; j < h; ++j) { + *dst_0 = 255; + *dst_1 = 255; + + dst_0 += step; + dst_1 += step; + } +} + +static void DrawDetectionBoxes(uint8_t* image, + const uint32_t imageWidth, + const uint32_t imageHeight, + const std::vector& results) +{ + for (const auto& result : results) { + DrawBox(image, imageWidth, imageHeight, result); + printf("Detection :: [%" PRIu32 ", %" PRIu32 + ", %" PRIu32 ", %" PRIu32 "]\n", + result.m_x0, + result.m_y0, + result.m_w, + result.m_h); + } +}