From 4ee9aa3fa5b65a2a45c7cb89b0e4b9c6972e0cbb Mon Sep 17 00:00:00 2001
From: Kiriti
Date: Fri, 1 Feb 2019 11:32:57 -0800
Subject: [PATCH 1/6] Samples added to WinML Ext

---
 .../amd_winml/samples/README.md               | 56 +++++++++++++++++++
 .../amd_winml/samples/data/emotions.txt       |  8 +++
 .../amd_winml/samples/winML-live-emotions.gdf | 31 ++++++++++
 .../amd_winml/samples/winML-live-vgg19.gdf    | 33 +++++++++++
 4 files changed, 128 insertions(+)
 create mode 100644 amd_openvx_extensions/amd_winml/samples/data/emotions.txt
 create mode 100644 amd_openvx_extensions/amd_winml/samples/winML-live-emotions.gdf
 create mode 100644 amd_openvx_extensions/amd_winml/samples/winML-live-vgg19.gdf

diff --git a/amd_openvx_extensions/amd_winml/samples/README.md b/amd_openvx_extensions/amd_winml/samples/README.md
index 8ba56aa3b6..9781a0bce7 100644
--- a/amd_openvx_extensions/amd_winml/samples/README.md
+++ b/amd_openvx_extensions/amd_winml/samples/README.md
@@ -58,3 +58,59 @@ data modelLocation = scalar:STRING,FULL_PATH_TO\squeezenet\model.onnx:view,resul
 ````
 data labelLocation = scalar:STRING,FULL_PATH_TO\data\Labels.txt
 ````
+
+## Sample - FER+ Emotion Recognition
+
+* Download the [FER+ Emotion Recognition](https://onnxzoo.blob.core.windows.net/models/opset_8/emotion_ferplus/emotion_ferplus.tar.gz) ONNX Model
+* Use [Netron](https://lutzroeder.github.io/netron/) to open the model.onnx
+  * Look at Model Properties to find Input & Output Tensor Name (Input3 - input; Plus692_Output_0 - output)
+  * Look at output tensor dimensions (n,c,h,w - [1,8] for Plus692_Output_0)
+* Use the label file - [data/emotions.txt](data/emotions.txt) to run sample
+
+This sample is in [Graph Description Format](../../../utilities/runvx#amd-runvx) (gdf)
+
+#### usage
+````
+runvx.exe -frames:LIVE winML-live-emotions.gdf
+````
+
+**NOTE:**
+Make the below changes in the `winML-live-emotions.gdf` file to run the inference
+
+* Add full path to the FER+ Emotion Recognition ONNX model downloaded in line 16
+````
+data modelLocation = scalar:STRING,FULL_PATH_TO\emotion_ferplus\model.onnx:view,inputImageWindow
+````
+
+* Add full path to the data\emotions.txt provided in this folder in line 25
+````
+data labelLocation = scalar:STRING,FULL_PATH_TO\data\emotions.txt
+````
+
+## Sample - VGG19
+
+* Download the [VGG-19](https://s3.amazonaws.com/download.onnx/models/opset_8/vgg19.tar.gz) ONNX Model
+* Use [Netron](https://lutzroeder.github.io/netron/) to open the model.onnx
+  * Look at Model Properties to find Input & Output Tensor Name (data_0 - input; prob_1 - output)
+  * Look at output tensor dimensions (n,c,h,w - [1,1000] for prob_1)
+* Use the label file - [data/Labels.txt](data/Labels.txt) to run sample
+
+This sample is in [Graph Description Format](../../../utilities/runvx#amd-runvx) (gdf)
+
+#### usage
+````
+runvx.exe -frames:LIVE winML-live-vgg19.gdf
+````
+
+**NOTE:**
+Make the below changes in the `winML-live-vgg19.gdf` file to run the inference
+
+* Add full path to the VGG-19 ONNX model downloaded in line 16
+````
+data modelLocation = scalar:STRING,FULL_PATH_TO\vgg19\model.onnx:view,inputImageWindow
+````
+
+* Add full path to the data\Labels.txt provided in this folder in line 25
+````
+data labelLocation = scalar:STRING,FULL_PATH_TO\data\Labels.txt
+````
\ No newline at end of file
diff --git a/amd_openvx_extensions/amd_winml/samples/data/emotions.txt b/amd_openvx_extensions/amd_winml/samples/data/emotions.txt
new file mode 100644
index 0000000000..06cc4e4eab
--- /dev/null
+++ b/amd_openvx_extensions/amd_winml/samples/data/emotions.txt
@@ -0,0 +1,8 @@
+0,neutral
+1,happiness
+2,surprise
+3,sadness
+4,anger
+5,disgust
+6,fear
+7,contempt
\ No newline at end of file
diff --git a/amd_openvx_extensions/amd_winml/samples/winML-live-emotions.gdf b/amd_openvx_extensions/amd_winml/samples/winML-live-emotions.gdf
new file mode 100644
index 0000000000..ebb8099a8d
--- /dev/null
+++ b/amd_openvx_extensions/amd_winml/samples/winML-live-emotions.gdf
@@ -0,0 +1,31 @@
+# Get WinML Extension DLL
+import vx_winml
+
+# Get input from camera: one for display & another for input
+data input_view = image:1080,720,RGB2:camera,0:view,inputImageWindow
+data input_image = image:64,64,U008:camera,0
+
+data input_tensor = tensor:4,{64,64,1,1},VX_TYPE_FLOAT32,0
+data a = scalar:FLOAT32,1.0
+data b = scalar:FLOAT32,0.0
+data reverse_channel_order = scalar:BOOL,0
+
+# Use convert to tensor node - convert input image to tensor
+node com.winml.convert_image_to_tensor input_image input_tensor a b reverse_channel_order
+
+data modelLocation = scalar:STRING,FULL_PATH_TO\emotion_ferplus\model.onnx:view,inputImageWindow
+data modelInputName = scalar:STRING,Input3
+data modelOutputName = scalar:STRING,Plus692_Output_0
+data output = tensor:2,{8,1},VX_TYPE_FLOAT32,0
+data deviceKind = scalar:INT32,0
+
+# Use Import ONNX Node to run Inference
+node com.winml.onnx_to_mivisionx modelLocation modelInputName modelOutputName input_tensor output deviceKind
+
+data labelLocation = scalar:STRING,FULL_PATH_TO\data\emotions.txt
+data top1 = scalar:STRING,INITIALIZE:view,inputImageWindow
+data top2 = scalar:STRING,INITIALIZE:view,inputImageWindow
+data top3 = scalar:STRING,INITIALIZE:view,inputImageWindow
+
+# Use get top K label node to display results
+node com.winml.get_top_k_labels output labelLocation top1 top2 top3
\ No newline at end of file
diff --git a/amd_openvx_extensions/amd_winml/samples/winML-live-vgg19.gdf b/amd_openvx_extensions/amd_winml/samples/winML-live-vgg19.gdf
new file mode 100644
index 0000000000..ad15ff5253
--- /dev/null
+++ b/amd_openvx_extensions/amd_winml/samples/winML-live-vgg19.gdf
@@ -0,0 +1,33 @@
+# Get WinML Extension DLL
+import vx_winml
+
+# Get input from camera: one for display & another for input
+data input_view = image:1080,720,RGB2:camera,0:view,inputImageWindow
+data input_image = image:224,224,RGB2:camera,0
+
+data input_tensor = tensor:4,{224,224,3,1},VX_TYPE_FLOAT32,0
+data a = scalar:FLOAT32,1.0
+data b = scalar:FLOAT32,0.0
+data reverse_channel_order = scalar:BOOL,0
+
+# Use convert to tensor node - convert input image to tensor
+node com.winml.convert_image_to_tensor input_image input_tensor a b reverse_channel_order
+
+data modelLocation = scalar:STRING,FULL_PATH_TO\vgg19\model.onnx:view,inputImageWindow
+data modelInputName = scalar:STRING,data_0
+data modelOutputName = scalar:STRING,softmaxout_1
+data output = tensor:4,{1,1,1000,1},VX_TYPE_FLOAT32,0
+data deviceKind = scalar:INT32,0
+
+# Use Import ONNX Node to run Inference
+node com.winml.onnx_to_mivisionx modelLocation modelInputName modelOutputName input_tensor output deviceKind
+
+data labelLocation = scalar:STRING,FULL_PATH_TO\data\Labels.txt
+data top1 = scalar:STRING,TOPK:view,resultWindow:view,inputImageWindow
+data top2 = scalar:STRING,TOPK:view,resultWindow:view,inputImageWindow
+data top3 = scalar:STRING,TOPK:view,resultWindow:view,inputImageWindow
+data top4 = scalar:STRING,TOPK:view,resultWindow:view,inputImageWindow
+data top5 = scalar:STRING,TOPK:view,resultWindow:view,inputImageWindow
+
+# Use get top K label node to display results
+node com.winml.get_top_k_labels output labelLocation top1 top2 top3 top4 top5
\ No newline at end of file

From e73cd3e54749118aa445eb9de537731a2dd08d3d Mon Sep 17 00:00:00 2001
From: Kiriti
Date: Fri, 1 Feb 2019 11:50:56 -0800
Subject: [PATCH 2/6] Samples updated

---
 .../amd_winml/samples/README.md               | 20 ++++++++----
 ...L-live-vgg19.gdf => winML-image-vgg19.gdf} | 32 ++++++++++++-------
 2 files changed, 33 insertions(+), 19 deletions(-)
 rename amd_openvx_extensions/amd_winml/samples/{winML-live-vgg19.gdf => winML-image-vgg19.gdf} (51%)

diff --git a/amd_openvx_extensions/amd_winml/samples/README.md b/amd_openvx_extensions/amd_winml/samples/README.md
index 9781a0bce7..bb9726600c 100644
--- a/amd_openvx_extensions/amd_winml/samples/README.md
+++ b/amd_openvx_extensions/amd_winml/samples/README.md
@@ -8,7 +8,7 @@ Get ONNX models from [ONNX Model Zoo](https://github.com/onnx/models)
 * Use [Netron](https://lutzroeder.github.io/netron/) to open the model.onnx
   * Look at Model Properties to find Input & Output Tensor Name (data_0 - input; softmaxout_1 - output)
   * Look at output tensor dimensions (n,c,h,w - [1,1000,1,1] for softmaxout_1)
-* Use the label file - Labels.txt and sample image - car.JPEG to run samples
+* Use the label file - [data\Labels.txt](data\Labels.txt) and sample image - data\car.JPEG to run samples
 
 ### winML-image.gdf - Single Image Inference
 
@@ -32,7 +32,7 @@ read input_image FULL_PATH_TO\data\car.JPEG
 data modelLocation = scalar:STRING,FULL_PATH_TO\squeezenet\model.onnx:view,resultWindow
 ````
 
-* Add full path to the data\Labels.txt provided in this folder in line 34
+* Add full path to the [data\Labels.txt](data\Labels.txt) provided in this folder in line 34
 ````
 data labelLocation = scalar:STRING,FULL_PATH_TO\data\Labels.txt
 ````
@@ -54,7 +54,7 @@ Make the below changes in the `winML-live.gdf` file to run the inference
 data modelLocation = scalar:STRING,FULL_PATH_TO\squeezenet\model.onnx:view,resultWindow
 ````
 
-* Add full path to the data\Labels.txt provided in this folder in line 25
+* Add full path to the [data\Labels.txt](data\Labels.txt) provided in this folder in line 25
 ````
 data labelLocation = scalar:STRING,FULL_PATH_TO\data\Labels.txt
 ````
@@ -99,18 +99,24 @@ This sample is in [Graph Description Format](../../../utilities/runvx#amd-runvx)
 
 #### usage
 ````
-runvx.exe -frames:LIVE winML-live-vgg19.gdf
+runvx.exe -v winML-image-vgg19.gdf
 ````
 
 **NOTE:**
 Make the below changes in the `winML-live-vgg19.gdf` file to run the inference
 
-* Add full path to the VGG-19 ONNX model downloaded in line 16
+* Add full path to the data\bird.JPEG image provided in this folder in line 11
 ````
-data modelLocation = scalar:STRING,FULL_PATH_TO\vgg19\model.onnx:view,inputImageWindow
+read input_image FULL_PATH_TO\data\bird.JPEG
 ````
 
-* Add full path to the data\Labels.txt provided in this folder in line 25
+* Add full path to the VGG 19 ONNX model downloaded in line 21
+````
+data modelLocation = scalar:STRING,FULL_PATH_TO\vgg19\model.onnx:view,resultWindow
+````
+
+* Add full path to the [data\Labels.txt](data\Labels.txt) provided in this folder in line 33
 ````
 data labelLocation = scalar:STRING,FULL_PATH_TO\data\Labels.txt
+````
 ````
\ No newline at end of file
diff --git a/amd_openvx_extensions/amd_winml/samples/winML-live-vgg19.gdf b/amd_openvx_extensions/amd_winml/samples/winML-image-vgg19.gdf
similarity index 51%
rename from amd_openvx_extensions/amd_winml/samples/winML-live-vgg19.gdf
rename to amd_openvx_extensions/amd_winml/samples/winML-image-vgg19.gdf
index ad15ff5253..f92e9587b4 100644
--- a/amd_openvx_extensions/amd_winml/samples/winML-live-vgg19.gdf
+++ b/amd_openvx_extensions/amd_winml/samples/winML-image-vgg19.gdf
@@ -1,33 +1,41 @@
 # Get WinML Extension DLL
 import vx_winml
 
-# Get input from camera: one for display & another for input
-data input_view = image:1080,720,RGB2:camera,0:view,inputImageWindow
-data input_image = image:224,224,RGB2:camera,0
-
+data result_image = image:640,150,RGB2
+data input_image = image:224,224,RGB2
 data input_tensor = tensor:4,{224,224,3,1},VX_TYPE_FLOAT32,0
 data a = scalar:FLOAT32,1.0
 data b = scalar:FLOAT32,0.0
 data reverse_channel_order = scalar:BOOL,0
 
+read input_image FULL_PATH_TO\data\bird.JPEG
+view input_image inputImageWindow
+view result_image resultWindow
+
 # Use convert to tensor node - convert input image to tensor
 node com.winml.convert_image_to_tensor input_image input_tensor a b reverse_channel_order
 
-data modelLocation = scalar:STRING,FULL_PATH_TO\vgg19\model.onnx:view,inputImageWindow
+# write input tensor
+write input_tensor input_tensor.f32
+
+data modelLocation = scalar:STRING,FULL_PATH_TO\data\vgg19\model.onnx:view,resultWindow
 data modelInputName = scalar:STRING,data_0
-data modelOutputName = scalar:STRING,softmaxout_1
-data output = tensor:4,{1,1,1000,1},VX_TYPE_FLOAT32,0
+data modelOutputName = scalar:STRING,prob_1
+data output = tensor:2,{1000,1},VX_TYPE_FLOAT32,0
 data deviceKind = scalar:INT32,0
 
 # Use Import ONNX Node to run Inference
 node com.winml.onnx_to_mivisionx modelLocation modelInputName modelOutputName input_tensor output deviceKind
 
+# write output tensor
+write output output_tensor.f32
+
 data labelLocation = scalar:STRING,FULL_PATH_TO\data\Labels.txt
-data top1 = scalar:STRING,TOPK:view,resultWindow:view,inputImageWindow
-data top2 = scalar:STRING,TOPK:view,resultWindow:view,inputImageWindow
-data top3 = scalar:STRING,TOPK:view,resultWindow:view,inputImageWindow
-data top4 = scalar:STRING,TOPK:view,resultWindow:view,inputImageWindow
-data top5 = scalar:STRING,TOPK:view,resultWindow:view,inputImageWindow
+data top1 = scalar:STRING,TOPK:view,resultWindow
+data top2 = scalar:STRING,TOPK:view,resultWindow
+data top3 = scalar:STRING,TOPK:view,resultWindow
+data top4 = scalar:STRING,TOPK:view,resultWindow
+data top5 = scalar:STRING,TOPK:view,resultWindow
 
 # Use get top K label node to display results
 node com.winml.get_top_k_labels output labelLocation top1 top2 top3 top4 top5
\ No newline at end of file
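The winML-image-vgg19.gdf graph introduced in PATCH 2/6 dumps its tensors with `write input_tensor input_tensor.f32` and `write output output_tensor.f32`. A quick way to sanity-check a run is to read the raw output dump back and take the argmax. The C sketch below is illustrative only; the file name and the 1,000-element FP32 size of the `prob_1` output are taken from the GDF above, and the printed index would then be looked up in data/Labels.txt.

````
#include <stdio.h>
#include <stdlib.h>

/* Reads the raw FP32 dump written by "write output output_tensor.f32"
 * in winML-image-vgg19.gdf and prints the argmax (top-1) index. */
int main(void)
{
    const size_t n = 1000;          /* prob_1 is 1x1000 in this sample */
    float *score = malloc(n * sizeof(float));
    FILE *fp = fopen("output_tensor.f32", "rb");
    if (!score || !fp || fread(score, sizeof(float), n, fp) != n) {
        fprintf(stderr, "could not read output_tensor.f32\n");
        return 1;
    }
    fclose(fp);

    size_t best = 0;
    for (size_t i = 1; i < n; i++)
        if (score[i] > score[best]) best = i;

    printf("top-1 class index: %zu (score %f)\n", best, score[best]);
    free(score);
    return 0;
}
````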
From 55019d8b78b1bcf7ca7b26f5a055ad4fdf6b9bdd Mon Sep 17 00:00:00 2001
From: Kiriti Gowda
Date: Fri, 1 Feb 2019 11:52:45 -0800
Subject: [PATCH 3/6] Fix link to labels

---
 amd_openvx_extensions/amd_winml/samples/README.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/amd_openvx_extensions/amd_winml/samples/README.md b/amd_openvx_extensions/amd_winml/samples/README.md
index bb9726600c..826ac426a8 100644
--- a/amd_openvx_extensions/amd_winml/samples/README.md
+++ b/amd_openvx_extensions/amd_winml/samples/README.md
@@ -8,7 +8,7 @@ Get ONNX models from [ONNX Model Zoo](https://github.com/onnx/models)
 * Use [Netron](https://lutzroeder.github.io/netron/) to open the model.onnx
   * Look at Model Properties to find Input & Output Tensor Name (data_0 - input; softmaxout_1 - output)
   * Look at output tensor dimensions (n,c,h,w - [1,1000,1,1] for softmaxout_1)
-* Use the label file - [data\Labels.txt](data\Labels.txt) and sample image - data\car.JPEG to run samples
+* Use the label file - [data\Labels.txt](data/Labels.txt) and sample image - data\car.JPEG to run samples
 
 ### winML-image.gdf - Single Image Inference
 
@@ -32,7 +32,7 @@ read input_image FULL_PATH_TO\data\car.JPEG
 data modelLocation = scalar:STRING,FULL_PATH_TO\squeezenet\model.onnx:view,resultWindow
 ````
 
-* Add full path to the [data\Labels.txt](data\Labels.txt) provided in this folder in line 34
+* Add full path to the [data\Labels.txt](data/Labels.txt) provided in this folder in line 34
 ````
 data labelLocation = scalar:STRING,FULL_PATH_TO\data\Labels.txt
 ````
@@ -54,7 +54,7 @@ Make the below changes in the `winML-live.gdf` file to run the inference
 data modelLocation = scalar:STRING,FULL_PATH_TO\squeezenet\model.onnx:view,resultWindow
 ````
 
-* Add full path to the [data\Labels.txt](data\Labels.txt) provided in this folder in line 25
+* Add full path to the [data\Labels.txt](data/Labels.txt) provided in this folder in line 25
 ````
 data labelLocation = scalar:STRING,FULL_PATH_TO\data\Labels.txt
 ````
@@ -119,4 +119,4 @@ data modelLocation = scalar:STRING,FULL_PATH_TO\vgg19\model.onnx:view,resultWind
 ````
 data labelLocation = scalar:STRING,FULL_PATH_TO\data\Labels.txt
 ````
-````
\ No newline at end of file
+````
From 59b12a496d206d322868b31be83f2af4235c21d2 Mon Sep 17 00:00:00 2001
From: Kiriti
Date: Fri, 1 Feb 2019 13:39:30 -0800
Subject: [PATCH 4/6] amd_winml.sln - default project winml

---
 amd_openvx_extensions/amd_winml.sln | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/amd_openvx_extensions/amd_winml.sln b/amd_openvx_extensions/amd_winml.sln
index f489269e83..dcaf2853e9 100644
--- a/amd_openvx_extensions/amd_winml.sln
+++ b/amd_openvx_extensions/amd_winml.sln
@@ -56,7 +56,7 @@ Global
 		HideSolutionNode = FALSE
 	EndGlobalSection
 	GlobalSection(ExtensibilityGlobals) = postSolution
-		SolutionGuid = {3FB58533-A51A-34B5-ACE3-6B1BFD797DB1} SolutionGuid = {F164F9CB-D02C-4BC0-9122-54E6CEA7DA72}
+		SolutionGuid = {3FB58533-A51A-34B5-ACE3-6B1BFD797DB1}
 	EndGlobalSection
 EndGlobal
 

From fedfe54adbcd05c28391852ed7b3cd818a744313 Mon Sep 17 00:00:00 2001
From: Kiriti
Date: Fri, 1 Feb 2019 13:46:52 -0800
Subject: [PATCH 5/6] vx_ext_winml.h fix

---
 .../amd_winml/include/vx_ext_winml.h | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/amd_openvx_extensions/amd_winml/include/vx_ext_winml.h b/amd_openvx_extensions/amd_winml/include/vx_ext_winml.h
index 6cb573de26..1dc6341734 100644
--- a/amd_openvx_extensions/amd_winml/include/vx_ext_winml.h
+++ b/amd_openvx_extensions/amd_winml/include/vx_ext_winml.h
@@ -54,12 +54,12 @@ extern "C" {
 	 * \retval vx_node A node reference. Any possible errors preventing a successful creation should be checked using \ref vxGetStatus*/
 	VX_API_ENTRY vx_node VX_API_CALL vxExtWinMLNode_OnnxToMivisionX
 	(
-        vx_graph graph,
-        vx_scalar modelLocation,
-        vx_scalar inputTensorName,
-        vx_scalar outputTensorName,
-        vx_tensor inputTensor,
-        vx_tensor outputTensor,
+		vx_graph graph,
+		vx_scalar modelLocation,
+		vx_scalar inputTensorName,
+		vx_scalar outputTensorName,
+		vx_tensor inputTensor,
+		vx_tensor outputTensor,
 		vx_scalar deviceKind
 	);
 
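The declaration re-indented in PATCH 5/6 is the same kernel the GDF samples reach through `com.winml.onnx_to_mivisionx`. The sketch below shows how that node could be created directly from C; it is illustrative only — it assumes the AMD string scalar type `VX_TYPE_STRING_AMD` from the base AMD OpenVX extensions for the three string parameters, and the model path, tensor names, and `deviceKind` value are placeholders copied from the VGG-19 sample.

````
#include <VX/vx.h>
#include "vx_ext_winml.h"

/* Hedged sketch: builds the ONNX-import node from C instead of GDF.
 * VX_TYPE_STRING_AMD is assumed to come from the AMD OpenVX extensions;
 * the path and tensor names below are placeholders from the sample. */
vx_node add_winml_onnx_node(vx_context context, vx_graph graph,
                            vx_tensor input_tensor, vx_tensor output_tensor)
{
    vx_int32 device = 0;  /* same value the GDF samples pass for deviceKind */

    vx_scalar modelLocation    = vxCreateScalar(context, VX_TYPE_STRING_AMD,
                                                "FULL_PATH_TO\\vgg19\\model.onnx");
    vx_scalar inputTensorName  = vxCreateScalar(context, VX_TYPE_STRING_AMD, "data_0");
    vx_scalar outputTensorName = vxCreateScalar(context, VX_TYPE_STRING_AMD, "prob_1");
    vx_scalar deviceKind       = vxCreateScalar(context, VX_TYPE_INT32, &device);

    /* Signature as declared in vx_ext_winml.h (PATCH 5/6) */
    return vxExtWinMLNode_OnnxToMivisionX(graph, modelLocation, inputTensorName,
                                          outputTensorName, input_tensor,
                                          output_tensor, deviceKind);
}
````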
From 1889f175868fc2420649b5d8c3c3fd85d281029a Mon Sep 17 00:00:00 2001
From: Kiriti Gowda
Date: Fri, 1 Feb 2019 13:53:02 -0800
Subject: [PATCH 6/6] fix winML images

---
 amd_openvx_extensions/amd_winml/README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/amd_openvx_extensions/amd_winml/README.md b/amd_openvx_extensions/amd_winml/README.md
index 1a13c80ff0..41d77f731e 100644
--- a/amd_openvx_extensions/amd_winml/README.md
+++ b/amd_openvx_extensions/amd_winml/README.md
@@ -1,11 +1,11 @@
 # AMD WinML Extension
 The AMD WinML (vx_winml) is an OpenVX module that implements a mechanism to access WinML functionality as OpenVX kernels. These kernels can be accessed from within OpenVX framework using OpenVX API call [vxLoadKernels](https://www.khronos.org/registry/vx/specs/1.0.1/html/da/d83/group__group__user__kernels.html#gae00b6343fbb0126e3bf0f587b09393a3)(context, "vx_winml").
 
-[image - original markup not recovered]
+[image - updated markup not recovered]
 
 WinML extension will allow developers to import a pre-trained ONNX model into an OpenVX graph and add hundreds of different pre & post processing `vision`/`generic`/`user-defined` functions, available in OpenVX and OpenCV interop, to the input and output of the neural net model. This will allow developers to build an end to end application for inference.
 
-[image - original markup not recovered]
+[image - updated markup not recovered]
 
 ## List of WinML-interop kernels
 The following is a list of WinML functions that have been included in the vx_winml module.
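The README paragraph updated in PATCH 6/6 loads the module with `vxLoadKernels(context, "vx_winml")` before any `com.winml.*` kernel is used. A minimal, self-contained sketch of that step is shown below; the error handling is illustrative, and nothing beyond what the paragraph states is assumed about the extension itself.

````
#include <stdio.h>
#include <VX/vx.h>

/* Create an OpenVX context, load the WinML kernels, and check status. */
int main(void)
{
    vx_context context = vxCreateContext();
    if (vxGetStatus((vx_reference)context) != VX_SUCCESS) {
        fprintf(stderr, "vxCreateContext failed\n");
        return 1;
    }

    vx_status status = vxLoadKernels(context, "vx_winml");
    if (status != VX_SUCCESS) {
        fprintf(stderr, "vxLoadKernels(vx_winml) failed: %d\n", status);
        vxReleaseContext(&context);
        return 1;
    }

    printf("vx_winml kernels loaded\n");
    vxReleaseContext(&context);
    return 0;
}
````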