diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 000000000..dbc827b02 Binary files /dev/null and b/.DS_Store differ diff --git a/.gitignore b/.gitignore index 112e91394..869b5a3a2 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ **/.vs/ +**/.vscode/ build/ config.tests/*/.qmake.stash config.tests/*/Makefile diff --git a/.gitmodules b/.gitmodules index 0b50efae7..4e39dbf33 100644 --- a/.gitmodules +++ b/.gitmodules @@ -17,3 +17,6 @@ path = libs url = https://github.com/cgutman/moonlight-qt-prebuilts.git shallow = true +[submodule "third-party/AMF"] + path = third-party/AMF + url = https://github.com/GPUOpen-LibrariesAndSDKs/AMF.git diff --git a/app/.DS_Store b/app/.DS_Store new file mode 100644 index 000000000..8e60760f5 Binary files /dev/null and b/app/.DS_Store differ diff --git a/app/SDL_GameControllerDB b/app/SDL_GameControllerDB index e5a5fa2ac..b4001f8b2 160000 --- a/app/SDL_GameControllerDB +++ b/app/SDL_GameControllerDB @@ -1 +1 @@ -Subproject commit e5a5fa2ac6e645d72c619ea99520a3a4586ee005 +Subproject commit b4001f8b2dd55a7225c736dae043ab4e737bf0ff diff --git a/app/app.pro b/app/app.pro index cbdb9fbba..93e532363 100644 --- a/app/app.pro +++ b/app/app.pro @@ -165,7 +165,7 @@ macx { CONFIG += discord-rpc } - LIBS += -lobjc -framework VideoToolbox -framework AVFoundation -framework CoreVideo -framework CoreGraphics -framework CoreMedia -framework AppKit -framework Metal -framework QuartzCore + LIBS += -lobjc -framework VideoToolbox -framework AVFoundation -framework CoreVideo -framework CoreGraphics -framework CoreMedia -framework AppKit -framework Metal -framework MetalFx -framework QuartzCore # For libsoundio LIBS += -framework CoreAudio -framework AudioUnit @@ -211,6 +211,7 @@ SOURCES += \ gui/sdlgamepadkeynavigation.cpp \ streaming/video/overlaymanager.cpp \ backend/systemproperties.cpp \ + streaming/video/videoenhancement.cpp \ wm.cpp HEADERS += \ @@ -219,6 +220,7 @@ HEADERS += \ cli/pair.h \ settings/compatfetcher.h \ 
settings/mappingfetcher.h \ + streaming/video/videoenhancement.h \ utils.h \ backend/computerseeker.h \ backend/identitymanager.h \ @@ -402,6 +404,18 @@ win32:!winrt { streaming/video/ffmpeg-renderers/d3d11va.h \ streaming/video/ffmpeg-renderers/pacer/dxvsyncsource.h } +win32:!winrt { + message(AMF enabled for AMD Drivers) + + SOURCES += \ + ../third-party/AMF/amf/public/common/AMFFactory.cpp \ + ../third-party/AMF/amf/public/common/AMFSTL.cpp \ + ../third-party/AMF/amf/public/common/Thread.cpp \ + ../third-party/AMF/amf/public/common/TraceAdapter.cpp \ + ../third-party/AMF/amf/public/common/Windows/ThreadWindows.cpp + + INCLUDEPATH += $$PWD/../third-party/AMF/amf +} macx { message(VideoToolbox renderer selected) diff --git a/app/backend/systemproperties.cpp b/app/backend/systemproperties.cpp index 554f43348..32219e0c2 100644 --- a/app/backend/systemproperties.cpp +++ b/app/backend/systemproperties.cpp @@ -6,6 +6,7 @@ #include "streaming/session.h" #include "streaming/streamutils.h" +#include "streaming/video/videoenhancement.h" #ifdef Q_OS_WIN32 #define WIN32_LEAN_AND_MEAN @@ -248,3 +249,27 @@ void SystemProperties::refreshDisplaysInternal() SDL_QuitSubSystem(SDL_INIT_VIDEO); } + +/** + * \brief Inform if the GPU is capable of Video enhancement + * + * Check if either Video Super-Resolution or SDR-to-HDR features can be used by the GPU. 
+ * + * \return bool Returns true if the GPU is capable + */ +bool SystemProperties::isVideoEnhancementCapable() +{ + VideoEnhancement* videoEnhancement = &VideoEnhancement::getInstance(); + return videoEnhancement->isUIvisible() && (videoEnhancement->isVSRcapable() || videoEnhancement->isHDRcapable()); +} + +/** + * \brief Inform if the GPU's driver is at an experiemental state of Video enhancement implementation + * + * \return bool Returns true if it is experimental yet + */ +bool SystemProperties::isVideoEnhancementExperimental() +{ + VideoEnhancement* videoEnhancement = &VideoEnhancement::getInstance(); + return videoEnhancement->isExperimental(); +} diff --git a/app/backend/systemproperties.h b/app/backend/systemproperties.h index dd4ec6ce9..f1eb7cb34 100644 --- a/app/backend/systemproperties.h +++ b/app/backend/systemproperties.h @@ -32,6 +32,8 @@ class SystemProperties : public QObject Q_INVOKABLE QRect getNativeResolution(int displayIndex); Q_INVOKABLE QRect getSafeAreaResolution(int displayIndex); Q_INVOKABLE int getRefreshRate(int displayIndex); + Q_INVOKABLE bool isVideoEnhancementCapable(); + Q_INVOKABLE bool isVideoEnhancementExperimental(); signals: void unmappedGamepadsChanged(); diff --git a/app/cli/commandlineparser.cpp b/app/cli/commandlineparser.cpp index 0e50a75ba..dfe8e6cb8 100644 --- a/app/cli/commandlineparser.cpp +++ b/app/cli/commandlineparser.cpp @@ -367,6 +367,7 @@ void StreamCommandLineParser::parse(const QStringList &args, StreamingPreference parser.addToggleOption("game-optimization", "game optimizations"); parser.addToggleOption("audio-on-host", "audio on host PC"); parser.addToggleOption("frame-pacing", "frame pacing"); + parser.addToggleOption("video-enhancement", "Enhance video with AI"); parser.addToggleOption("mute-on-focus-loss", "mute audio when Moonlight window loses focus"); parser.addToggleOption("background-gamepad", "background gamepad input"); parser.addToggleOption("reverse-scroll-direction", "inverted scroll 
direction"); @@ -474,6 +475,9 @@ void StreamCommandLineParser::parse(const QStringList &args, StreamingPreference // Resolve --frame-pacing and --no-frame-pacing options preferences->framePacing = parser.getToggleOptionValue("frame-pacing", preferences->framePacing); + // Resolve --video-enhancement and --no-video-enhancement options + preferences->videoEnhancement = parser.getToggleOptionValue("video-enhancement", preferences->videoEnhancement); + // Resolve --mute-on-focus-loss and --no-mute-on-focus-loss options preferences->muteOnFocusLoss = parser.getToggleOptionValue("mute-on-focus-loss", preferences->muteOnFocusLoss); diff --git a/app/gui/SettingsView.qml b/app/gui/SettingsView.qml index 30b9ed787..6f87bbf67 100644 --- a/app/gui/SettingsView.qml +++ b/app/gui/SettingsView.qml @@ -820,6 +820,44 @@ Flickable { ToolTip.visible: hovered ToolTip.text: qsTr("Frame pacing reduces micro-stutter by delaying frames that come in too early") } + + CheckBox { + id: videoEnhancementCheck + width: parent.width + hoverEnabled: true + text: qsTr("Video AI-Enhancement") + font.pointSize: 12 + enabled: SystemProperties.isVideoEnhancementCapable() + checked: { + return SystemProperties.isVideoEnhancementCapable() && StreamingPreferences.videoEnhancement + } + property bool keepValue: checked; + onCheckedChanged: { + StreamingPreferences.videoEnhancement = checked + } + ToolTip.delay: 1000 + ToolTip.timeout: 5000 + ToolTip.visible: hovered + ToolTip.text: + qsTr("Enhance video quality by utilizing the GPU's AI-Enhancement capabilities.") + + qsTr("\nThis feature effectively upscales, reduces compression artifacts and enhances the clarity of streamed content.") + + qsTr("\nNote:") + + qsTr("\n - If available, ensure that appropriate settings (i.e. 
RTX Video enhancement) are enabled in your GPU driver configuration.") + + qsTr("\n - HDR rendering has divers issues depending on the GPU used, we are working on it but we advise to currently use Non-HDR.") + + qsTr("\n - Be advised that using this feature on laptops running on battery power may lead to significant battery drain.") + + Component.onCompleted: { + if (!SystemProperties.isVideoEnhancementCapable()){ + // VSR or SDR->HDR feature could not be initialized by any GPU available + text = qsTr("Video AI-Enhancement (Not supported by the GPU)") + enabled = false; + checked = false; + } else if(SystemProperties.isVideoEnhancementExperimental()){ + // Indicate if the feature is available but not officially deployed by the Vendor + text = qsTr("Video AI-Enhancement (Experimental)") + } + } + } } } @@ -1523,6 +1561,16 @@ Flickable { StreamingPreferences.videoDecoderSelection = decoderListModel.get(currentIndex).val } } + onCurrentIndexChanged: { + if(decoderListModel.get(currentIndex).val === StreamingPreferences.VDS_FORCE_SOFTWARE){ + videoEnhancementCheck.enabled = false; + videoEnhancementCheck.keepValue = videoEnhancementCheck.checked; + videoEnhancementCheck.checked = false; + } else { + videoEnhancementCheck.enabled = true; + videoEnhancementCheck.checked = videoEnhancementCheck.keepValue; + } + } } Label { diff --git a/app/settings/streamingpreferences.cpp b/app/settings/streamingpreferences.cpp index bb34f6269..55d212484 100644 --- a/app/settings/streamingpreferences.cpp +++ b/app/settings/streamingpreferences.cpp @@ -33,6 +33,7 @@ #define SER_ABSTOUCHMODE "abstouchmode" #define SER_STARTWINDOWED "startwindowed" #define SER_FRAMEPACING "framepacing" +#define SER_VIDEOENHANCEMENT "videoenhancement" #define SER_CONNWARNINGS "connwarnings" #define SER_UIDISPLAYMODE "uidisplaymode" #define SER_RICHPRESENCE "richpresence" @@ -131,6 +132,7 @@ void StreamingPreferences::reload() absoluteMouseMode = settings.value(SER_ABSMOUSEMODE, false).toBool(); 
absoluteTouchMode = settings.value(SER_ABSTOUCHMODE, true).toBool(); framePacing = settings.value(SER_FRAMEPACING, false).toBool(); + videoEnhancement = settings.value(SER_VIDEOENHANCEMENT, false).toBool(); connectionWarnings = settings.value(SER_CONNWARNINGS, true).toBool(); richPresence = settings.value(SER_RICHPRESENCE, true).toBool(); gamepadMouse = settings.value(SER_GAMEPADMOUSE, true).toBool(); @@ -321,6 +323,7 @@ void StreamingPreferences::save() settings.setValue(SER_ABSMOUSEMODE, absoluteMouseMode); settings.setValue(SER_ABSTOUCHMODE, absoluteTouchMode); settings.setValue(SER_FRAMEPACING, framePacing); + settings.setValue(SER_VIDEOENHANCEMENT, videoEnhancement); settings.setValue(SER_CONNWARNINGS, connectionWarnings); settings.setValue(SER_RICHPRESENCE, richPresence); settings.setValue(SER_GAMEPADMOUSE, gamepadMouse); diff --git a/app/settings/streamingpreferences.h b/app/settings/streamingpreferences.h index 3ca216fff..972fb0269 100644 --- a/app/settings/streamingpreferences.h +++ b/app/settings/streamingpreferences.h @@ -119,6 +119,7 @@ class StreamingPreferences : public QObject Q_PROPERTY(bool absoluteMouseMode MEMBER absoluteMouseMode NOTIFY absoluteMouseModeChanged) Q_PROPERTY(bool absoluteTouchMode MEMBER absoluteTouchMode NOTIFY absoluteTouchModeChanged) Q_PROPERTY(bool framePacing MEMBER framePacing NOTIFY framePacingChanged) + Q_PROPERTY(bool videoEnhancement MEMBER videoEnhancement NOTIFY videoEnhancementChanged) Q_PROPERTY(bool connectionWarnings MEMBER connectionWarnings NOTIFY connectionWarningsChanged) Q_PROPERTY(bool richPresence MEMBER richPresence NOTIFY richPresenceChanged) Q_PROPERTY(bool gamepadMouse MEMBER gamepadMouse NOTIFY gamepadMouseChanged) @@ -158,6 +159,7 @@ class StreamingPreferences : public QObject bool absoluteMouseMode; bool absoluteTouchMode; bool framePacing; + bool videoEnhancement; bool connectionWarnings; bool richPresence; bool gamepadMouse; @@ -202,6 +204,7 @@ class StreamingPreferences : public QObject void 
uiDisplayModeChanged(); void windowModeChanged(); void framePacingChanged(); + void videoEnhancementChanged(); void connectionWarningsChanged(); void richPresenceChanged(); void gamepadMouseChanged(); diff --git a/app/streaming/.DS_Store b/app/streaming/.DS_Store new file mode 100644 index 000000000..9e9935d5c Binary files /dev/null and b/app/streaming/.DS_Store differ diff --git a/app/streaming/session.cpp b/app/streaming/session.cpp index b6f1a0251..2ceca733f 100644 --- a/app/streaming/session.cpp +++ b/app/streaming/session.cpp @@ -261,7 +261,7 @@ void Session::clSetControllerLED(uint16_t controllerNumber, uint8_t r, uint8_t g bool Session::chooseDecoder(StreamingPreferences::VideoDecoderSelection vds, SDL_Window* window, int videoFormat, int width, int height, - int frameRate, bool enableVsync, bool enableFramePacing, bool testOnly, IVideoDecoder*& chosenDecoder) + int frameRate, bool enableVsync, bool enableFramePacing, bool enableVideoEnhancement, bool testOnly, IVideoDecoder*& chosenDecoder) { DECODER_PARAMETERS params; @@ -277,6 +277,7 @@ bool Session::chooseDecoder(StreamingPreferences::VideoDecoderSelection vds, params.window = window; params.enableVsync = enableVsync; params.enableFramePacing = enableFramePacing; + params.enableVideoEnhancement = enableVideoEnhancement; params.testOnly = testOnly; params.vds = vds; @@ -381,7 +382,7 @@ void Session::getDecoderInfo(SDL_Window* window, // Try an HEVC Main10 decoder first to see if we have HDR support if (chooseDecoder(StreamingPreferences::VDS_FORCE_HARDWARE, window, VIDEO_FORMAT_H265_MAIN10, 1920, 1080, 60, - false, false, true, decoder)) { + false, false, false, true, decoder)) { isHardwareAccelerated = decoder->isHardwareAccelerated(); isFullScreenOnly = decoder->isAlwaysFullScreen(); isHdrSupported = decoder->isHdrSupported(); @@ -394,7 +395,7 @@ void Session::getDecoderInfo(SDL_Window* window, // Try an AV1 Main10 decoder next to see if we have HDR support if 
(chooseDecoder(StreamingPreferences::VDS_FORCE_HARDWARE, window, VIDEO_FORMAT_AV1_MAIN10, 1920, 1080, 60, - false, false, true, decoder)) { + false, false, false, true, decoder)) { // If we've got a working AV1 Main 10-bit decoder, we'll enable the HDR checkbox // but we will still continue probing to get other attributes for HEVC or H.264 // decoders. See the AV1 comment at the top of the function for more info. @@ -406,10 +407,10 @@ void Session::getDecoderInfo(SDL_Window* window, // that supports HDR rendering with software decoded frames. if (chooseDecoder(StreamingPreferences::VDS_FORCE_SOFTWARE, window, VIDEO_FORMAT_H265_MAIN10, 1920, 1080, 60, - false, false, true, decoder) || + false, false, false, true, decoder) || chooseDecoder(StreamingPreferences::VDS_FORCE_SOFTWARE, window, VIDEO_FORMAT_AV1_MAIN10, 1920, 1080, 60, - false, false, true, decoder)) { + false, false, false, true, decoder)) { isHdrSupported = decoder->isHdrSupported(); delete decoder; } @@ -423,7 +424,7 @@ void Session::getDecoderInfo(SDL_Window* window, // Try a regular hardware accelerated HEVC decoder now if (chooseDecoder(StreamingPreferences::VDS_FORCE_HARDWARE, window, VIDEO_FORMAT_H265, 1920, 1080, 60, - false, false, true, decoder)) { + false, false, false, true, decoder)) { isHardwareAccelerated = decoder->isHardwareAccelerated(); isFullScreenOnly = decoder->isAlwaysFullScreen(); maxResolution = decoder->getDecoderMaxResolution(); @@ -450,7 +451,7 @@ void Session::getDecoderInfo(SDL_Window* window, // This will fall back to software decoding, so it should always work. 
if (chooseDecoder(StreamingPreferences::VDS_AUTO, window, VIDEO_FORMAT_H264, 1920, 1080, 60, - false, false, true, decoder)) { + false, false, false, true, decoder)) { isHardwareAccelerated = decoder->isHardwareAccelerated(); isFullScreenOnly = decoder->isAlwaysFullScreen(); maxResolution = decoder->getDecoderMaxResolution(); @@ -470,7 +471,7 @@ Session::getDecoderAvailability(SDL_Window* window, { IVideoDecoder* decoder; - if (!chooseDecoder(vds, window, videoFormat, width, height, frameRate, false, false, true, decoder)) { + if (!chooseDecoder(vds, window, videoFormat, width, height, frameRate, false, false, false, true, decoder)) { return DecoderAvailability::None; } @@ -491,7 +492,7 @@ bool Session::populateDecoderProperties(SDL_Window* window) m_StreamConfig.width, m_StreamConfig.height, m_StreamConfig.fps, - false, false, true, decoder)) { + false, false, false, true, decoder)) { return false; } @@ -2219,6 +2220,7 @@ void Session::execInternal() m_ActiveVideoHeight, m_ActiveVideoFrameRate, enableVsync, enableVsync && m_Preferences->framePacing, + m_Preferences->videoEnhancement, false, s_ActiveSession->m_VideoDecoder)) { SDL_AtomicUnlock(&m_DecoderLock); diff --git a/app/streaming/session.h b/app/streaming/session.h index 95858437e..05ca04f08 100644 --- a/app/streaming/session.h +++ b/app/streaming/session.h @@ -187,7 +187,7 @@ class Session : public QObject bool chooseDecoder(StreamingPreferences::VideoDecoderSelection vds, SDL_Window* window, int videoFormat, int width, int height, int frameRate, bool enableVsync, bool enableFramePacing, - bool testOnly, + bool enableVideoEnhancement, bool testOnly, IVideoDecoder*& chosenDecoder); static diff --git a/app/streaming/video/.DS_Store b/app/streaming/video/.DS_Store new file mode 100644 index 000000000..133237bb4 Binary files /dev/null and b/app/streaming/video/.DS_Store differ diff --git a/app/streaming/video/decoder.h b/app/streaming/video/decoder.h index 24708d828..f3c8ef76e 100644 --- 
a/app/streaming/video/decoder.h +++ b/app/streaming/video/decoder.h @@ -42,6 +42,7 @@ typedef struct _DECODER_PARAMETERS { int frameRate; bool enableVsync; bool enableFramePacing; + bool enableVideoEnhancement; bool testOnly; } DECODER_PARAMETERS, *PDECODER_PARAMETERS; diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 201eb7acc..c429808ab 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -7,6 +7,22 @@ #include "streaming/streamutils.h" #include "streaming/session.h" +#include "streaming/video/videoenhancement.h" + +#include "public/common/AMFFactory.h" +#include "public/include/core/Platform.h" +// Video upscaling & Sharpening +#include "public/include/components/HQScaler.h" + +#include +#include +#include +#include +#include + +extern "C" { +#include +} #include #include @@ -15,6 +31,9 @@ using Microsoft::WRL::ComPtr; +#pragma comment(lib, "d3d11.lib") +#pragma comment(lib, "dxgi.lib") + // Standard DXVA GUIDs for HEVC RExt profiles (redefined for compatibility with pre-24H2 SDKs) DEFINE_GUID(k_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN_444, 0x4008018f, 0xf537, 0x4b36, 0x98, 0xcf, 0x61, 0xaf, 0x8a, 0x2c, 0x1a, 0x33); DEFINE_GUID(k_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10_444, 0x0dabeffa, 0x4458, 0x4602, 0xbc, 0x03, 0x07, 0x95, 0x65, 0x9d, 0x61, 0x7c); @@ -88,19 +107,28 @@ static const std::array k_Vi D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) : m_DecoderSelectionPass(decoderSelectionPass), - m_DevicesWithFL11Support(0), m_DevicesWithCodecSupport(0), + m_DevicesWithFL11Support(0), m_LastColorSpace(-1), m_LastFullRange(false), m_LastColorTrc(AVCOL_TRC_UNSPECIFIED), m_AllowTearing(false), m_OverlayLock(0), m_HwDeviceContext(nullptr), - m_HwFramesContext(nullptr) + m_HwFramesContext(nullptr), + m_AmfContext(nullptr), + m_AmfSurface(nullptr), + m_AmfData(nullptr), + m_AmfUpScaler(nullptr), + m_AmfInitialized(false) { + // 
RtlZeroMemory(m_VideoTextureResourceViews, sizeof(m_VideoTextureResourceViews)); + m_ContextLock = SDL_CreateMutex(); DwmEnableMMCSS(TRUE); + + m_VideoEnhancement = &VideoEnhancement::getInstance(); } D3D11VARenderer::~D3D11VARenderer() @@ -139,6 +167,34 @@ D3D11VARenderer::~D3D11VARenderer() m_RenderTargetView.Reset(); m_SwapChain.Reset(); + // cleanup AMF instances + if(m_AmfUpScaler){ + // Up Scaler + m_AmfUpScaler->Terminate(); + m_AmfUpScaler = nullptr; + } + if(m_AmfContext){ + // Context + m_AmfContext->Terminate(); + m_AmfContext = nullptr; + } + + g_AMFFactory.Terminate(); + + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } + +#ifdef QT_DEBUG + ComPtr debugDevice; + if(FAILED(m_Device->QueryInterface(__uuidof(ID3D11Debug), reinterpret_cast(debugDevice.GetAddressOf())))) { + debugDevice = nullptr; + } +#endif + av_buffer_unref(&m_HwFramesContext); av_buffer_unref(&m_HwDeviceContext); @@ -148,24 +204,145 @@ D3D11VARenderer::~D3D11VARenderer() m_DeviceContext->Flush(); } +// Uncomment the lines in the QT_DEBUG section if you need to debug DirectX objects +#ifdef QT_DEBUG + // if(debugDevice) { + // debugDevice->ReportLiveDeviceObjects(D3D11_RLDO_IGNORE_INTERNAL); + // } + // CComPtr pDebugDevice; + // if (SUCCEEDED(DXGIGetDebugInterface1(0, IID_PPV_ARGS(&pDebugDevice)))) + // { + // pDebugDevice->ReportLiveObjects(DXGI_DEBUG_DX, DXGI_DEBUG_RLO_FLAGS(DXGI_DEBUG_RLO_DETAIL | DXGI_DEBUG_RLO_IGNORE_INTERNAL)); + // } +#endif + m_Device.Reset(); m_DeviceContext.Reset(); m_Factory.Reset(); } +/** + * \brief Set HDR MetaData information for Stream and Output + * + * Get the HDR MetaData via LimeLight library sent by Sunshine to apply to the Stream. + * Get the monitor HDR MetaData where the application is running to apply to the Output. 
+ * + * \param bool enabled At true it enables the HDR settings + * \return void + */ +void D3D11VARenderer::setHdrMode(bool enabled){ + + // m_VideoProcessor needs to be available to be set, + // and it makes sense only when HDR is enabled from the UI + if(!enabled || !m_VideoProcessor || !(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT)) + return; + + DXGI_HDR_METADATA_HDR10 streamHDRMetaData; + DXGI_HDR_METADATA_HDR10 outputHDRMetaData; + + // Prepare HDR Meta Data for Streamed content + bool streamSet = false; + SS_HDR_METADATA hdrMetadata; + if (LiGetHdrMetadata(&hdrMetadata)) { + streamHDRMetaData.RedPrimary[0] = hdrMetadata.displayPrimaries[0].x; + streamHDRMetaData.RedPrimary[1] = hdrMetadata.displayPrimaries[0].y; + streamHDRMetaData.GreenPrimary[0] = hdrMetadata.displayPrimaries[1].x; + streamHDRMetaData.GreenPrimary[1] = hdrMetadata.displayPrimaries[1].y; + streamHDRMetaData.BluePrimary[0] = hdrMetadata.displayPrimaries[2].x; + streamHDRMetaData.BluePrimary[1] = hdrMetadata.displayPrimaries[2].y; + streamHDRMetaData.WhitePoint[0] = hdrMetadata.whitePoint.x; + streamHDRMetaData.WhitePoint[1] = hdrMetadata.whitePoint.y; + streamHDRMetaData.MaxMasteringLuminance = hdrMetadata.maxDisplayLuminance; + streamHDRMetaData.MinMasteringLuminance = hdrMetadata.minDisplayLuminance; + + // As the Content is unknown since it is streamed, MaxCLL and MaxFALL cannot be evaluated from the source on the fly, + // therefore streamed source returns 0 as value for both. We can safetly set them to 0. + streamHDRMetaData.MaxContentLightLevel = 0; + streamHDRMetaData.MaxFrameAverageLightLevel = 0; + + // Set HDR Stream (input) Meta data + m_VideoContext->VideoProcessorSetStreamHDRMetaData( + m_VideoProcessor.Get(), + 0, + DXGI_HDR_METADATA_TYPE_HDR10, + sizeof(DXGI_HDR_METADATA_HDR10), + &streamHDRMetaData + ); + + streamSet = true; + } + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, + "Set stream HDR mode: %s", streamSet ? 
"enabled" : "disabled"); + + // Prepare HDR Meta Data to match the monitor HDR specifications + // Retreive the monitor HDR metadata where the application is displayed + int appAdapterIndex = 0; + int appOutputIndex = 0; + bool displaySet = false; + if (SDL_DXGIGetOutputInfo(SDL_GetWindowDisplayIndex(m_DecoderParams.window), &appAdapterIndex, &appOutputIndex)){ + ComPtr adapter; + ComPtr output; + UINT outputIndex = appOutputIndex; + if(SUCCEEDED(m_Factory->EnumAdapters1(appAdapterIndex, &adapter))){ + if(SUCCEEDED(adapter->EnumOutputs(outputIndex, &output))){ + ComPtr output6; + if (SUCCEEDED(output->QueryInterface(__uuidof(IDXGIOutput6), (void**)&output6))) { + DXGI_OUTPUT_DESC1 desc1; + if (output6) { + output6->GetDesc1(&desc1); + // Magic constants to convert to fixed point. + // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 + static constexpr int kPrimariesFixedPoint = 50000; + static constexpr int kMinLuminanceFixedPoint = 10000; + + // Format Monitor HDR MetaData + outputHDRMetaData.RedPrimary[0] = desc1.RedPrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.RedPrimary[1] = desc1.RedPrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.GreenPrimary[0] = desc1.GreenPrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.GreenPrimary[1] = desc1.GreenPrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.BluePrimary[0] = desc1.BluePrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.BluePrimary[1] = desc1.BluePrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.WhitePoint[0] = desc1.WhitePoint[0] * kPrimariesFixedPoint; + outputHDRMetaData.WhitePoint[1] = desc1.WhitePoint[1] * kPrimariesFixedPoint; + outputHDRMetaData.MaxMasteringLuminance = desc1.MaxLuminance; + outputHDRMetaData.MinMasteringLuminance = desc1.MinLuminance * kMinLuminanceFixedPoint; + // Set it the same as streamed source which is 0 by default as it cannot be evaluated on the fly. 
+ outputHDRMetaData.MaxContentLightLevel = 0; + outputHDRMetaData.MaxFrameAverageLightLevel = 0; + + // Prepare HDR for the OutPut Monitor + m_VideoContext->VideoProcessorSetOutputHDRMetaData( + m_VideoProcessor.Get(), + DXGI_HDR_METADATA_TYPE_HDR10, + sizeof(DXGI_HDR_METADATA_HDR10), + &outputHDRMetaData + ); + + displaySet = true; + } + } + } + } + } + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, + "Set display HDR mode: %s", displaySet ? "enabled" : "disabled"); + +} + bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound) { const D3D_FEATURE_LEVEL supportedFeatureLevels[] = { D3D_FEATURE_LEVEL_11_1, D3D_FEATURE_LEVEL_11_0 }; bool success = false; ComPtr adapter; DXGI_ADAPTER_DESC1 adapterDesc; + ComPtr pMultithread; D3D_FEATURE_LEVEL featureLevel; HRESULT hr; SDL_assert(!m_Device); SDL_assert(!m_DeviceContext); - hr = m_Factory->EnumAdapters1(adapterIndex, &adapter); + hr = m_Factory->EnumAdapters1(adapterIndex, adapter.GetAddressOf()); if (hr == DXGI_ERROR_NOT_FOUND) { // Expected at the end of enumeration goto Exit; @@ -197,13 +374,18 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter adapterDesc.VendorId, adapterDesc.DeviceId); + // D3D11_CREATE_DEVICE_DEBUG generates more information about DirectX11 objects for debugging. 
+ // https://seanmiddleditch.github.io/direct3d-11-debug-api-tricks/ + // Notes: + // * ID3D11Device Refcount: 2 => This is a normal behavior as debugDevice still need m_Device to work + // * For any other object, Refcount: 0, We can ignore IntRef value hr = D3D11CreateDevice(adapter.Get(), D3D_DRIVER_TYPE_UNKNOWN, nullptr, D3D11_CREATE_DEVICE_VIDEO_SUPPORT - #ifdef QT_DEBUG +#ifdef QT_DEBUG | D3D11_CREATE_DEVICE_DEBUG - #endif +#endif , supportedFeatureLevels, ARRAYSIZE(supportedFeatureLevels), @@ -230,6 +412,18 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter m_DevicesWithFL11Support++; } + // Avoid the application to crash in case of multithread conflict on the same resource + if(SUCCEEDED(m_Device->QueryInterface(__uuidof(ID3D11Multithread), (void**)&pMultithread))) + { + pMultithread->SetMultithreadProtected(true); + } + + if(m_VideoEnhancement->isVideoEnhancementEnabled() && !createVideoProcessor()){ + // Disable enhancement if the Video Processor creation failed + m_VideoEnhancement->enableVideoEnhancement(false); + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "VideoProcessor failed to be created"); + } + bool ok; m_BindDecoderOutputTextures = !!qEnvironmentVariableIntValue("D3D11VA_FORCE_BIND", &ok); if (!ok) { @@ -282,6 +476,13 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter if (!checkDecoderSupport(adapter.Get())) { m_DeviceContext.Reset(); m_Device.Reset(); + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } + goto Exit; } else { @@ -298,13 +499,494 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter return success; } +/** + * \brief Get the Adapter Index based on Video enhancement capabilities + * + * In case of multiple GPUs, get the most appropriate GPU available based on accessible capabilities + * and priority of Vendor implementation status (NVIDIA -> AMD -> Intel -> 
Others). + * + * \return int Returns an Adapter index + */ +int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() +{ + ComPtr adapter; + DXGI_ADAPTER_DESC1 adapterDesc; + + int highestScore = -1; + int adapterIndex = -1; + int index = 0; + while(m_Factory->EnumAdapters1(index, adapter.GetAddressOf()) != DXGI_ERROR_NOT_FOUND) + { + if (SUCCEEDED(adapter->GetDesc1(&adapterDesc))) { + + if (adapterDesc.Flags & DXGI_ADAPTER_FLAG_SOFTWARE) { + // Skip the WARP device. We know it will fail. + index++; + continue; + } + + m_DeviceContext.Reset(); + m_Device.Reset(); + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } + + if (SUCCEEDED(D3D11CreateDevice( + adapter.Get(), + D3D_DRIVER_TYPE_UNKNOWN, + nullptr, + D3D11_CREATE_DEVICE_VIDEO_SUPPORT, + nullptr, + 0, + D3D11_SDK_VERSION, + &m_Device, + nullptr, + &m_DeviceContext)) + && createVideoProcessor()){ + + // VSR has the priority over SDR-to-HDR in term of capability we want to use. + // The priority value may change over the time, + // below statement has been established based on drivers' capabilities status by February 29th 2024. 
+ + int score = -1; + + // Video Super Resolution + if(m_VideoEnhancement->isVendorAMD(adapterDesc.VendorId) && enableAMDVideoSuperResolution(false, false)){ + score = std::max(score, 200); + } else if(m_VideoEnhancement->isVendorIntel(adapterDesc.VendorId) && enableIntelVideoSuperResolution(false, false)){ + score = std::max(score, 100); + } else if(m_VideoEnhancement->isVendorNVIDIA(adapterDesc.VendorId) && enableNvidiaVideoSuperResolution(false, false)){ + score = std::max(score, 300); + } + + // SDR to HDR auto conversion + if(m_VideoEnhancement->isVendorAMD(adapterDesc.VendorId) && enableAMDHDR(false, false)){ + score = std::max(score, 20); + } else if(m_VideoEnhancement->isVendorIntel(adapterDesc.VendorId) && enableIntelHDR(false, false)){ + score = std::max(score, 10); + } else if(m_VideoEnhancement->isVendorNVIDIA(adapterDesc.VendorId) && enableNvidiaHDR(false, false)){ + score = std::max(score, 30); + } + + // Recording the highest score, which will represent the most capable adapater for Video enhancement + if(score > highestScore){ + highestScore = score; + adapterIndex = index; + } + } + } + + index++; + } + + // Set Video enhancement information + if(m_Factory->EnumAdapters1(adapterIndex, adapter.GetAddressOf()) != DXGI_ERROR_NOT_FOUND){ + + if (SUCCEEDED(adapter->GetDesc1(&adapterDesc))) { + + m_DeviceContext.Reset(); + m_Device.Reset(); + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } + + if (SUCCEEDED(D3D11CreateDevice( + adapter.Get(), + D3D_DRIVER_TYPE_UNKNOWN, + nullptr, + D3D11_CREATE_DEVICE_VIDEO_SUPPORT, + nullptr, + 0, + D3D11_SDK_VERSION, + &m_Device, + nullptr, + &m_DeviceContext)) + && createVideoProcessor()){ + + m_VideoEnhancement->setVendorID(adapterDesc.VendorId); + + if(adapterIndex >= 0){ + // Convert wchar[128] to string + std::wstring GPUname(adapterDesc.Description); + qInfo() << "GPU used for Video Enhancement: " << GPUname; + + // Test, but do not 
active yet to ensure it will be reinitialize when needed + if(m_VideoEnhancement->isVendorAMD()){ + m_VideoEnhancement->setVSRcapable(enableAMDVideoSuperResolution(false)); + m_VideoEnhancement->setHDRcapable(enableAMDHDR(false)); + } else if(m_VideoEnhancement->isVendorIntel()){ + m_VideoEnhancement->setVSRcapable(enableIntelVideoSuperResolution(false)); + m_VideoEnhancement->setHDRcapable(enableIntelHDR(false)); + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + m_VideoEnhancement->setVSRcapable(enableNvidiaVideoSuperResolution(false)); + m_VideoEnhancement->setHDRcapable(enableNvidiaHDR(false)); + } else if (m_VideoProcessorCapabilities.AutoStreamCaps & D3D11_VIDEO_PROCESSOR_AUTO_STREAM_CAPS_SUPER_RESOLUTION){ + // Try Auto Stream Super Resolution provided by DirectX11+ and agnostic to any Vendor + m_AutoStreamSuperResolution = true; + m_VideoEnhancement->setVSRcapable(true); + } + + // Enable the visibility of Video enhancement feature in the settings of the User interface + m_VideoEnhancement->enableUIvisible(); + } + } + } + } + + m_DeviceContext.Reset(); + m_Device.Reset(); + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } + + return adapterIndex; +} + +/** + * \brief Enable Video Super-Resolution for AMD GPU + * + * This feature is available since this drive 22.3.1 (March 2022) + * https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 + * + * \param bool activate Default is true, at true it enables the use of Video Super-Resolution feature + * \return bool Return true if the capability is available + */ +bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo){ + // The feature is announced since Jan 23rd, 2024, with the driver 24.1.1 and on series 7000 + // https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 + // But it is available as 
SDK since March 2022 (22.3.1) which means it might also work for series 5000 and 6000 (to be tested) + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md + + AMF_RESULT res; + amf::AMFCapsPtr amfCaps; + amf::AMFIOCapsPtr pInputCaps; + + // We skip if already initialized + if(m_AmfInitialized && activate) + return true; + + amf::AMF_SURFACE_FORMAT SurfaceFormatYUV; + AMFColor backgroundColor = AMFConstructColor(0, 0, 0, 255); + + // AMF Context initialization + res = g_AMFFactory.Init(); + if (res != AMF_OK) goto Error; + res = g_AMFFactory.GetFactory()->CreateContext(&m_AmfContext); + if (res != AMF_OK) goto Error; + res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFHQScaler, &m_AmfUpScaler); + if (res != AMF_OK) goto Error; + + res = m_AmfContext->InitDX11(m_Device.Get()); + if (res != AMF_OK) goto Error; + + // AMFHQScaler is the newest feature available (v1.4.33), so at least this one need to be accessible + m_AmfUpScaler->GetCaps(&amfCaps); + if (amfCaps != nullptr && amfCaps->GetAccelerationType() == amf::AMF_ACCEL_NOT_SUPPORTED) { + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "The hardware does not support needed AMD AMF capabilities."); + goto Error; + } + + // Format initialization + SurfaceFormatYUV = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
amf::AMF_SURFACE_P010 : amf::AMF_SURFACE_NV12; + + // Input Surface initialization + res = m_AmfContext->AllocSurface(amf::AMF_MEMORY_DX11, + SurfaceFormatYUV, + m_DecoderParams.width, + m_DecoderParams.height, + &m_AmfSurface); + if (res != AMF_OK) goto Error; + + // Upscale initialization + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_OUTPUT_SIZE, ::AMFConstructSize(m_DisplayWidth, m_DisplayHeight)); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_0); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_KEEP_ASPECT_RATIO, true); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FILL, true); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FILL_COLOR, backgroundColor); + // We only apply sharpening when the picture is scaled (0 = Most sharpened / 2.00 = Not sharpened) + if (m_OutputTexture.width == m_DecoderParams.width && m_OutputTexture.height == m_DecoderParams.height){ + m_AmfUpScalerSharpness = false; + } else { + m_AmfUpScalerSharpness = true; + } + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, m_AmfUpScalerSharpness ? 
0.30 : 2.00); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FRAME_RATE, m_DecoderParams.frameRate); + // Initialize with the size of the texture that will be input + m_AmfUpScalerSurfaceFormat = SurfaceFormatYUV; + res = m_AmfUpScaler->Init(SurfaceFormatYUV, + m_DecoderParams.width, + m_DecoderParams.height); + if (res != AMF_OK) goto Error; + + if(!activate){ + // Up Scaler + m_AmfUpScaler->Terminate(); + m_AmfUpScaler = nullptr; + // Context + m_AmfContext->Terminate(); + m_AmfContext = nullptr; + // Factory + g_AMFFactory.Terminate(); + + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution disabled"); + } else { + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution enabled"); + } + + m_AmfInitialized = activate; + return true; + +Error: + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution failed."); + m_AmfInitialized = false; + return false; +} + +/** + * \brief Enable Video Super-Resolution for Intel GPU + * + * This experimental feature from Intel is available starting from Intel iGPU from CPU Gen 10th (Skylake) and Intel graphics driver 27.20.100.8681 (Sept 15, 2020) + * Only Arc GPUs seem to provide visual improvement + * https://www.techpowerup.com/305558/intel-outs-video-super-resolution-for-chromium-browsers-works-with-igpus-11th-gen-onward + * Values from Chromium source code: + * https://chromium.googlesource.com/chromium/src/+/master/ui/gl/swap_chain_presenter.cc + * + * \param bool activate Default is true, at true it enables the use of Video Super-Resolution feature + * \return bool Return true if the capability is available + */ +bool D3D11VARenderer::enableIntelVideoSuperResolution(bool activate, bool logInfo){ + HRESULT hr; + + constexpr GUID GUID_INTEL_VPE_INTERFACE = {0xedd1d4b9, 0x8659, 0x4cbc, {0xa4, 0xd6, 0x98, 0x31, 0xa2, 0x16, 0x3a, 0xc3}}; + constexpr UINT kIntelVpeFnVersion = 0x01; + constexpr UINT kIntelVpeFnMode = 0x20; + constexpr UINT 
kIntelVpeFnScaling = 0x37; + constexpr UINT kIntelVpeVersion3 = 0x0003; + constexpr UINT kIntelVpeModeNone = 0x0; + constexpr UINT kIntelVpeModePreproc = 0x01; + constexpr UINT kIntelVpeScalingDefault = 0x0; + constexpr UINT kIntelVpeScalingSuperResolution = 0x2; + + UINT param = 0; + + struct IntelVpeExt + { + UINT function; + void* param; + }; + + IntelVpeExt ext{0, ¶m}; + + ext.function = kIntelVpeFnVersion; + param = kIntelVpeVersion3; + + hr = m_VideoContext->VideoProcessorSetOutputExtension( + m_VideoProcessor.Get(), &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + if (FAILED(hr)) + { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Intel VPE version failed: %x", + hr); + return false; + } + + ext.function = kIntelVpeFnMode; + if(activate){ + param = kIntelVpeModePreproc; + } else { + param = kIntelVpeModeNone; + } + + hr = m_VideoContext->VideoProcessorSetOutputExtension( + m_VideoProcessor.Get(), &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + if (FAILED(hr)) + { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Intel VPE mode failed: %x", + hr); + return false; + } + + ext.function = kIntelVpeFnScaling; + if(activate){ + param = kIntelVpeScalingSuperResolution; + } else { + param = kIntelVpeScalingDefault; + } + + hr = m_VideoContext->VideoProcessorSetStreamExtension( + m_VideoProcessor.Get(), 0, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + if (FAILED(hr)) + { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Intel Video Super Resolution failed: %x", + hr); + return false; + } + + if(activate){ + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution enabled"); + } else { + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution disabled"); + } + + return true; +} + +/** + * \brief Enable Video Super-Resolution for NVIDIA + * + * This feature is available starting from series NVIDIA RTX 2000 and GeForce driver 545.84 (Oct 17, 2023) + * + * IMPORTANT (Feb 5th, 2024): RTX VSR seems to be limited to SDR 
content only, + * it does add a grey filter if it is activated while HDR is on on stream (Host setting does not impact it). + * It might be fixed later by NVIDIA, but the temporary solution is to disable the feature when Stream content is HDR-on + * Values from Chromium source code: + * https://chromium.googlesource.com/chromium/src/+/master/ui/gl/swap_chain_presenter.cc + * + * \param bool activate Default is true, at true it enables the use of Video Super-Resolution feature + * \return bool Return true if the capability is available + */ +bool D3D11VARenderer::enableNvidiaVideoSuperResolution(bool activate, bool logInfo){ + HRESULT hr; + + // Toggle VSR + constexpr GUID GUID_NVIDIA_PPE_INTERFACE = {0xd43ce1b3, 0x1f4b, 0x48ac, {0xba, 0xee, 0xc3, 0xc2, 0x53, 0x75, 0xe6, 0xf7}}; + constexpr UINT kStreamExtensionVersionV1 = 0x1; + constexpr UINT kStreamExtensionMethodSuperResolution = 0x2; + + struct NvidiaStreamExt + { + UINT version; + UINT method; + UINT enable; + }; + + // Convert bool to UINT + UINT enable = activate; + + NvidiaStreamExt stream_extension_info = {kStreamExtensionVersionV1, kStreamExtensionMethodSuperResolution, enable}; + hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor.Get(), 0, &GUID_NVIDIA_PPE_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); + if (FAILED(hr)) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "NVIDIA RTX Video Super Resolution failed: %x", + hr); + return false; + } + + if(activate){ + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution enabled"); + } else { + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution disabled"); + } + + return true; +} + +/** + * \brief Enable HDR for AMD GPU + * + * This feature is not availble for AMD, and has not yet been announced (by Jan 24th, 2024) + * + * \param bool activate Default is true, at true it enables the use of HDR feature + * \return bool Return true if the capability is 
available + */ +bool D3D11VARenderer::enableAMDHDR(bool activate, bool logInfo){ + + // [TODO] Feature not yet announced + + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD HDR capability is not yet supported by your client's GPU."); + return false; +} + +/** + * \brief Enable HDR for Intel GPU + * + * This feature is not availble for Intel, and has not yet been announced (by Jan 24th, 2024) + * + * \param bool activate Default is true, at true it enables the use of HDR feature + * \return bool Return true if the capability is available + */ +bool D3D11VARenderer::enableIntelHDR(bool activate, bool logInfo){ + + // [TODO] Feature not yet announced + + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel HDR capability is not yet supported by your client's GPU."); + return false; +} + +/** + * \brief Enable HDR for NVIDIA + * + * This feature is available starting from series NVIDIA RTX 2000 and GeForce driver 545.84 (Oct 17, 2023) + * + * Note: Even if the feature is enabled, I could not find any settings of ColorSpace and DXG8Format which + * can work without having the screen darker. 
Here are what I found: + * 1) Moonlight HDR: Checked / SwapChain: DXGI_FORMAT_R10G10B10A2_UNORM / VideoTexture: DXGI_FORMAT_P010 => SDR convert to HDR, but with darker rendering + * 2) Moonlight HDR: Unchecked / SwapChain: DXGI_FORMAT_R10G10B10A2_UNORM / VideoTexture: DXGI_FORMAT_NV12 => SDR convert to HDR, but with darker rendering + * Values from Chromium source code: + * https://chromium.googlesource.com/chromium/src/+/master/ui/gl/swap_chain_presenter.cc + * + * \param bool activate Default is true, at true it enables the use of HDR feature + * \return bool Return true if the capability is available + */ +bool D3D11VARenderer::enableNvidiaHDR(bool activate, bool logInfo){ + HRESULT hr; + + // Toggle HDR + constexpr GUID GUID_NVIDIA_TRUE_HDR_INTERFACE = {0xfdd62bb4, 0x620b, 0x4fd7, {0x9a, 0xb3, 0x1e, 0x59, 0xd0, 0xd5, 0x44, 0xb3}}; + constexpr UINT kStreamExtensionVersionV4 = 0x4; + constexpr UINT kStreamExtensionMethodTrueHDR = 0x3; + + struct NvidiaStreamExt + { + UINT version; + UINT method; + UINT enable : 1; + UINT reserved : 31; + }; + + // Convert bool to UINT + UINT enable = activate; + + NvidiaStreamExt stream_extension_info = {kStreamExtensionVersionV4, kStreamExtensionMethodTrueHDR, enable, 0u}; + hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor.Get(), 0, &GUID_NVIDIA_TRUE_HDR_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); + if (FAILED(hr)) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "NVIDIA RTX HDR failed: %x", + hr); + return false; + } + + if(activate){ + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR enabled"); + } else { + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR disabled"); + } + + return true; +} + bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) { - int adapterIndex, outputIndex; HRESULT hr; m_DecoderParams = *params; + // Use only even number to avoid a crash a texture creation + m_DecoderParams.width = m_DecoderParams.width & ~1; + 
m_DecoderParams.height = m_DecoderParams.height & ~1; + if (qgetenv("D3D11VA_ENABLED") == "0") { SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "D3D11VA is disabled by environment variable"); @@ -318,15 +1000,50 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } + // By default try the adapter corresponding to the display where our window resides. + // This will let us avoid a copy if the display GPU has the required decoder. + // If Video enhancement is enabled, it will look for the most capable GPU in case of multiple GPUs. if (!SDL_DXGIGetOutputInfo(SDL_GetWindowDisplayIndex(params->window), - &adapterIndex, &outputIndex)) { + &m_AdapterIndex, &m_OutputIndex)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "SDL_DXGIGetOutputInfo() failed: %s", SDL_GetError()); return false; } - hr = CreateDXGIFactory(__uuidof(IDXGIFactory5), (void**)&m_Factory); + // Use the current window size as the swapchain size + SDL_GetWindowSize(m_DecoderParams.window, (int*)&m_DisplayWidth, (int*)&m_DisplayHeight); + + // Rounddown to even number to avoid a crash at texture creation + m_DisplayWidth = m_DisplayWidth & ~1; + m_DisplayHeight = m_DisplayHeight & ~1; + + // As m_Display corresponds to the application window, which may not have the same ratio as the Frame, + // we calculate the size of the final texture to fit in the window without distortion + m_OutputTexture.width = m_DisplayWidth; + m_OutputTexture.height = m_DisplayHeight; + m_OutputTexture.left = 0; + m_OutputTexture.top = 0; + + // Sscale the source to the destination surface while keeping the same ratio + float ratioWidth = static_cast(m_DisplayWidth) / static_cast(m_DecoderParams.width); + float ratioHeight = static_cast(m_DisplayHeight) / static_cast(m_DecoderParams.height); + + if(ratioHeight < ratioWidth){ + // Adjust the Width + m_OutputTexture.width = static_cast(std::floor(m_DecoderParams.width * ratioHeight)); + m_OutputTexture.width = m_OutputTexture.width & ~1; + m_OutputTexture.left = 
static_cast(std::floor( abs(m_DisplayWidth - m_OutputTexture.width) / 2 )); + m_OutputTexture.left = m_OutputTexture.left & ~1; + } else if(ratioWidth < ratioHeight) { + // Adjust the Height + m_OutputTexture.height = static_cast(std::floor(m_DecoderParams.height * ratioWidth)); + m_OutputTexture.height = m_OutputTexture.height & ~1; + m_OutputTexture.top = static_cast(std::floor( abs(m_DisplayHeight - m_OutputTexture.height) / 2 )); + m_OutputTexture.top = m_OutputTexture.top & ~1; + } + + hr = CreateDXGIFactory(__uuidof(IDXGIFactory5), (void**)m_Factory.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "CreateDXGIFactory() failed: %x", @@ -334,14 +1051,37 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } - // First try the adapter corresponding to the display where our window resides. - // This will let us avoid a copy if the display GPU has the required decoder. - if (!createDeviceByAdapterIndex(adapterIndex)) { + // If getAdapterIndex return 0+, it means that we already identified which adapter best fit for Video enhancement, + // so we don't have to estimate it more times to speed up the launch of the streaming. 
+ if(m_VideoEnhancement->getAdapterIndex() < 0){ + // This line is run only once during the application life and is necessary to display (or not) + // the Video enhancement checkbox if the GPU enables it + int adapterIndex = getAdapterIndexByEnhancementCapabilities(); + if(adapterIndex >= 0){ + m_VideoEnhancement->setAdapterIndex(adapterIndex); + } else { + m_VideoEnhancement->setAdapterIndex(m_AdapterIndex); + } + } + + if(m_VideoEnhancement->isEnhancementCapable()){ + // Check if the user has enable Video enhancement + m_VideoEnhancement->enableVideoEnhancement(m_DecoderParams.enableVideoEnhancement); + } + + // Set the adapter index of the most appropriate GPU + if( + m_VideoEnhancement->isVideoEnhancementEnabled() + && m_VideoEnhancement->getAdapterIndex() >= 0 + ){ + m_AdapterIndex = m_VideoEnhancement->getAdapterIndex(); + } + if (!createDeviceByAdapterIndex(m_AdapterIndex)) { // If that didn't work, we'll try all GPUs in order until we find one // or run out of GPUs (DXGI_ERROR_NOT_FOUND from EnumAdapters()) bool adapterNotFound = false; for (int i = 0; !adapterNotFound; i++) { - if (i == adapterIndex) { + if (i == m_AdapterIndex) { // Don't try the same GPU again continue; } @@ -359,6 +1099,46 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } } + if(m_BindDecoderOutputTextures){ + // Disable Video enhancement as we do not copy the frame to process it + m_VideoEnhancement->enableVideoEnhancement(false); + } + + // Set VSR and HDR + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + // Enable VSR feature if available + if(m_VideoEnhancement->isVSRcapable()){ + // Try Auto Stream Super Resolution provided by DirectX11+ and agnostic to any Vendor + if (m_AutoStreamSuperResolution){ + // The flag does exist, but not the method yet (by March 8th, 2024) + // We still can prepare the code once Microsof enable it. 
+ // m_VideoContext->VideoProcessorSetStreamSuperResolution(m_VideoProcessor.Get(), 0, true); + } else if(m_VideoEnhancement->isVendorAMD()){ + enableAMDVideoSuperResolution(); + } else if(m_VideoEnhancement->isVendorIntel()){ + enableIntelVideoSuperResolution(); + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + enableNvidiaVideoSuperResolution(); + } + } + + // Enable SDR->HDR feature if available + if(m_VideoEnhancement->isHDRcapable()){ + if(m_VideoEnhancement->isVendorAMD()){ + enableAMDHDR(); + } else if(m_VideoEnhancement->isVendorIntel()){ + enableIntelHDR(); + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + if(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT){ + // Disable SDR->HDR feature because the screen becomes grey when activated + enableNvidiaHDR(false); + } else { + enableNvidiaHDR(); + } + } + } + } + DXGI_SWAP_CHAIN_DESC1 swapChainDesc = {}; swapChainDesc.Stereo = FALSE; swapChainDesc.SampleDesc.Count = 1; @@ -387,12 +1167,8 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // causes performance issues (buffer starvation) on AMD GPUs. swapChainDesc.BufferCount = 3 + 1 + 1; - // Use the current window size as the swapchain size - SDL_GetWindowSize(params->window, (int*)&swapChainDesc.Width, (int*)&swapChainDesc.Height); - - m_DisplayWidth = swapChainDesc.Width; - m_DisplayHeight = swapChainDesc.Height; - + swapChainDesc.Width = m_DisplayWidth; + swapChainDesc.Height = m_DisplayHeight; if (params->videoFormat & VIDEO_FORMAT_MASK_10BIT) { swapChainDesc.Format = DXGI_FORMAT_R10G10B10A2_UNORM; } @@ -428,7 +1204,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // DXVA2 may let us take over for FSE V-sync off cases. However, if we don't have DXGI_FEATURE_PRESENT_ALLOW_TEARING // then we should not attempt to do this unless there's no other option (HDR, DXVA2 failed in pass 1, etc). 
if (!m_AllowTearing && m_DecoderSelectionPass == 0 && !(params->videoFormat & VIDEO_FORMAT_MASK_10BIT) && - (SDL_GetWindowFlags(params->window) & SDL_WINDOW_FULLSCREEN_DESKTOP) == SDL_WINDOW_FULLSCREEN) { + (SDL_GetWindowFlags(params->window) & SDL_WINDOW_FULLSCREEN_DESKTOP) == SDL_WINDOW_FULLSCREEN) { SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION, "Defaulting to DXVA2 for FSE without DXGI_FEATURE_PRESENT_ALLOW_TEARING support"); return false; @@ -448,7 +1224,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) &swapChainDesc, nullptr, nullptr, - &swapChain); + swapChain.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -476,19 +1252,11 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } - // Surfaces must be 16 pixel aligned for H.264 and 128 pixel aligned for everything else - // https://github.com/FFmpeg/FFmpeg/blob/a234e5cd80224c95a205c1f3e297d8c04a1374c3/libavcodec/dxva2.c#L609-L616 - m_TextureAlignment = (params->videoFormat & VIDEO_FORMAT_MASK_H264) ? 16 : 128; - - if (!setupRenderingResources()) { - return false; - } - { m_HwDeviceContext = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_D3D11VA); if (!m_HwDeviceContext) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "Failed to allocate D3D11VA device context"); + "Failed to allocate D3D11VA device context"); return false; } @@ -513,11 +1281,15 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } } + // Surfaces must be 16 pixel aligned for H.264 and 128 pixel aligned for everything else + // https://github.com/FFmpeg/FFmpeg/blob/a234e5cd80224c95a205c1f3e297d8c04a1374c3/libavcodec/dxva2.c#L609-L616 + m_TextureAlignment = (params->videoFormat & VIDEO_FORMAT_MASK_H264) ? 
16 : 128; + { m_HwFramesContext = av_hwframe_ctx_alloc(m_HwDeviceContext); if (!m_HwFramesContext) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "Failed to allocate D3D11VA frame context"); + "Failed to allocate D3D11VA frame context"); return false; } @@ -573,6 +1345,27 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } } + m_SrcBox.left = 0; + m_SrcBox.top = 0; + m_SrcBox.right = m_DecoderParams.width; + m_SrcBox.bottom = m_DecoderParams.height; + m_SrcBox.front = 0; + m_SrcBox.back = 1; + + // Create our video textures and SRVs + if (!setupEnhancedTexture() || !setupAmfTexture()) { + return false; + } + + // As for Video Enhancement, the RTV uses a texture, it needs to be setup after the textures creation + if (!setupRenderingResources()) { + return false; + } + + if(m_VideoProcessor && m_VideoEnhancement->isVideoEnhancementEnabled()){ + initializeVideoProcessor(); + } + return true; } @@ -608,6 +1401,11 @@ void D3D11VARenderer::renderFrame(AVFrame* frame) // because the render target view will be unbound by Present(). 
m_DeviceContext->OMSetRenderTargets(1, m_RenderTargetView.GetAddressOf(), nullptr); + // Prepare the Enhanced Output + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + prepareEnhancedOutput(frame); + } + // Render our video frame with the aspect-ratio adjusted viewport renderVideo(frame); @@ -802,7 +1600,7 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame) constData.pSysMem = &constBuf; ComPtr constantBuffer; - HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, &constantBuffer); + HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, constantBuffer.GetAddressOf()); if (SUCCEEDED(hr)) { m_DeviceContext->PSSetConstantBuffers(1, 1, constantBuffer.GetAddressOf()); } @@ -818,6 +1616,72 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame) m_LastFullRange = fullRange; } +/** + * \brief Set the output for enhanced rendering + * + * According to the colorspace from the source, set the corresponding output colorspace. + * For AMF, disable the sharpness when HDR is on on Host + * + * \param AVFrame* frame The frame to be displayed on screen + * \return void + */ +void D3D11VARenderer::prepareEnhancedOutput(AVFrame* frame) +{ + bool frameFullRange = isFrameFullRange(frame); + int frameColorSpace = getFrameColorspace(frame); + + // If nothing has changed since last frame, we're done + if (frameColorSpace == m_LastColorSpace && frameFullRange == m_LastFullRange) { + return; + } + + m_LastColorSpace = frameColorSpace; + m_LastFullRange = frameFullRange; + + switch (frameColorSpace) { + + case COLORSPACE_REC_2020: + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, frameFullRange ? DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020 : DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), frameFullRange ? 
DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020 : DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); + + if(m_VideoEnhancement->isVendorNVIDIA()){ + // VSR from Nvidia does not work yet on HDR content (Observation by March 28th, 2024) + // https://en.wikipedia.org/wiki/Video_Super_Resolution#:~:text=The%20feature%20supports%20input%20resolutions,likely%20added%20in%20the%20future + enableNvidiaVideoSuperResolution(false); + } else if(m_VideoEnhancement->isVendorIntel()){ + // Enable VSR for Intel when the Host has HDR activated. + enableIntelVideoSuperResolution(); + } + if(m_AmfInitialized){ + // Disable sharpness when HDR is enable on client side because it generates white borders + m_AmfUpScaler->Flush(); + m_AmfUpScaler->Terminate(); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, 2.00); + m_AmfUpScaler->Init(m_AmfUpScalerSurfaceFormat, m_DecoderParams.width, m_DecoderParams.height); + } + break; + + default: + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, frameFullRange ? DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709 : DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709); + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), frameFullRange ? DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709 : DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709); + + if(m_VideoEnhancement->isVendorNVIDIA()){ + // Always enable NVIDIA VSR for SDR content + enableNvidiaVideoSuperResolution(); + } else if(m_VideoEnhancement->isVendorIntel()){ + // Disable VSR for Intel when the Host has HDR disactivated to avoid having a grey screen. + enableIntelVideoSuperResolution(false); + } + if(m_AmfInitialized){ + // Enable Sharpness for Non-HDR source (host) + m_AmfUpScaler->Flush(); + m_AmfUpScaler->Terminate(); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, m_AmfUpScalerSharpness ? 
0.30 : 2.00); + m_AmfUpScaler->Init(m_AmfUpScalerSurfaceFormat, m_DecoderParams.width, m_DecoderParams.height); + } + } +} + void D3D11VARenderer::renderVideo(AVFrame* frame) { // Bind video rendering vertex buffer @@ -825,7 +1689,7 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) UINT offset = 0; m_DeviceContext->IASetVertexBuffers(0, 1, m_VideoVertexBuffer.GetAddressOf(), &stride, &offset); - UINT srvIndex; + UINT srvIndex = 0; if (m_BindDecoderOutputTextures) { // Our indexing logic depends on a direct mapping into m_VideoTextureResourceViews // based on the texture index provided by FFmpeg. @@ -838,7 +1702,6 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) return; } - // Ensure decoding operations have completed using a dummy fence. // This is not necessary on modern GPU drivers, but it is required // on some older Intel GPU drivers that don't properly synchronize @@ -859,19 +1722,33 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) } } } - else { - // Copy this frame (minus alignment padding) into our video texture - D3D11_BOX srcBox; - srcBox.left = 0; - srcBox.top = 0; - srcBox.right = m_DecoderParams.width; - srcBox.bottom = m_DecoderParams.height; - srcBox.front = 0; - srcBox.back = 1; - m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + else if(m_AmfInitialized){ + // AMD (RDNA2+) + + // Copy this frame (minus alignment padding) into a temporary video texture + m_DeviceContext->CopySubresourceRegion(m_AmfTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &m_SrcBox); + m_AmfContext->CreateSurfaceFromDX11Native(m_AmfTexture.Get(), &m_AmfSurface, nullptr); + + // Up Scaling => To a higher resolution than the application window to give more surface to the VSR to generate details and thus picture clarity + m_AmfUpScaler->SubmitInput(m_AmfSurface); + m_AmfUpScaler->QueryOutput(&m_AmfData); + m_AmfUpScaler->Flush(); 
+ + m_AmfData->QueryInterface(amf::AMFSurface::IID(), reinterpret_cast(&m_AmfSurface)); + m_DeviceContext->CopyResource(m_VideoTexture.Get(), (ID3D11Texture2D*)m_AmfSurface->GetPlaneAt(0)->GetNative()); + } else if(m_VideoEnhancement->isVideoEnhancementEnabled() && !m_AmfInitialized){ + // NVIDIA RTX 2000+ + // Intel Arc+ + + // Copy this frame (minus alignment padding) into a temporary video texture + m_DeviceContext->CopySubresourceRegion(m_EnhancedTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &m_SrcBox); + // Process operations on the output Texture + m_VideoContext->VideoProcessorBlt(m_VideoProcessor.Get(), m_OutputView.Get(), 0, 1, &m_StreamData); + } else { + // No Enhancement processing - // SRV 0 is always mapped to the video texture - srvIndex = 0; + // Copy this frame (minus alignment padding) into a temporary video texture + m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &m_SrcBox); } // Bind our CSC shader (and constant buffer, if required) @@ -881,7 +1758,7 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) ID3D11ShaderResourceView* frameSrvs[] = { m_VideoTextureResourceViews[srvIndex][0].Get(), m_VideoTextureResourceViews[srvIndex][1].Get() }; m_DeviceContext->PSSetShaderResources(0, 2, frameSrvs); - // Draw the video + // Process shaders on the output texture m_DeviceContext->DrawIndexed(6, 0, 0); // Unbind SRVs for this frame @@ -889,6 +1766,181 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) m_DeviceContext->PSSetShaderResources(0, 2, nullSrvs); } +/** + * \brief Add the Video Processor to the pipeline + * + * Creating a Video Processor add additional GPU video processing method like AI Upscaling + * + * \return bool Returns true if the Video processor is successfully created + */ +bool D3D11VARenderer::createVideoProcessor() +{ + HRESULT hr; + D3D11_VIDEO_PROCESSOR_CONTENT_DESC content_desc; + + 
if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } + + // Get video device + hr = m_Device->QueryInterface(__uuidof(ID3D11VideoDevice), + (void**)m_VideoDevice.GetAddressOf()); + if (FAILED(hr)) { + return false; + } + + // Get video context + hr = m_DeviceContext->QueryInterface(__uuidof(ID3D11VideoContext2), + (void**)m_VideoContext.GetAddressOf()); + if (FAILED(hr)) { + return false; + } + + ZeroMemory(&content_desc, sizeof(content_desc)); + content_desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE; + content_desc.InputFrameRate.Numerator = m_DecoderParams.frameRate; + content_desc.InputFrameRate.Denominator = 1; + content_desc.InputWidth = m_DecoderParams.width; + content_desc.InputHeight = m_DecoderParams.height; + content_desc.OutputWidth = m_DisplayWidth; + content_desc.OutputHeight = m_DisplayHeight; + content_desc.OutputFrameRate.Numerator = m_DecoderParams.frameRate; + content_desc.OutputFrameRate.Denominator = 1; + content_desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL; + + hr = m_VideoDevice->CreateVideoProcessorEnumerator(&content_desc, &m_VideoProcessorEnumerator); + if (FAILED(hr)) + return false; + + hr = m_VideoDevice->CreateVideoProcessor(m_VideoProcessorEnumerator.Get(), 0, + &m_VideoProcessor); + if (FAILED(hr)) + return false; + + hr = m_VideoProcessorEnumerator->GetVideoProcessorCaps(&m_VideoProcessorCapabilities); + if (FAILED(hr)) { + return false; + } + + return true; +} + +/** + * \brief Set the Video Processor to the pipeline + * + * Set proper Color space, filtering, and additional GPU video processing method like AI Upscaling + * + * \return bool Returns true if the Video processor is successfully setup + */ +bool D3D11VARenderer::initializeVideoProcessor() +{ + HRESULT hr; + + // INPUT setting + D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputViewDesc; + + ZeroMemory(&inputViewDesc, sizeof(inputViewDesc)); + inputViewDesc.FourCC = 0; + 
inputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; + inputViewDesc.Texture2D.MipSlice = 0; + inputViewDesc.Texture2D.ArraySlice = 0; + + hr = m_VideoDevice->CreateVideoProcessorInputView( + m_EnhancedTexture.Get(), + m_VideoProcessorEnumerator.Get(), + &inputViewDesc, + (ID3D11VideoProcessorInputView**)&m_InputView); + if (FAILED(hr)) + return false; + + RECT inputRect = { 0 }; + inputRect.right = m_DisplayWidth; + inputRect.bottom = m_DisplayHeight; + m_VideoContext->VideoProcessorSetStreamDestRect(m_VideoProcessor.Get(), 0, true, &inputRect); + + m_VideoContext->VideoProcessorSetStreamFrameFormat(m_VideoProcessor.Get(), 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE); + + // Initialize Color spaces, this will be adjusted once the first frame is received + if(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT){ + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); + } else { + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709); + } + + + // OUTPUT setting + D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outputViewDesc; + + ZeroMemory(&outputViewDesc, sizeof(outputViewDesc)); + outputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D; + outputViewDesc.Texture2D.MipSlice = 0; + + hr = m_VideoDevice->CreateVideoProcessorOutputView( + m_VideoTexture.Get(), + m_VideoProcessorEnumerator.Get(), + &outputViewDesc, + (ID3D11VideoProcessorOutputView**)&m_OutputView); + if (FAILED(hr)) + return false; + + RECT targetRect = { 0 }; + targetRect.right = m_DisplayWidth; + targetRect.bottom = m_DisplayHeight; + m_VideoContext->VideoProcessorSetOutputTargetRect(m_VideoProcessor.Get(), true, &targetRect); + + m_VideoContext->VideoProcessorSetStreamOutputRate(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL, false, NULL); + + // Set Background color + D3D11_VIDEO_COLOR bgColor; + bgColor.RGBA = { 0, 0, 0, 1 }; // black 
color + m_VideoContext->VideoProcessorSetOutputBackgroundColor(m_VideoProcessor.Get(), false, &bgColor); + + // Initialize Color spaces, this will be adjusted once the first frame is received + if(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT){ + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); + } else { + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709); + } + + // The section is a customization per vendor to slightly enhance (non-AI methods) the frame appearance. + // It does work in addition to AI-enhancement for better result. + if(m_VideoEnhancement->isVendorAMD()){ + // AMD does not have such filters + } else if(m_VideoEnhancement->isVendorIntel()){ + // Reduce blocking artifacts + if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100) + // Sharpen sligthly the picture to enhance details + if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 20); // (0 / 0 / 100) + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + // Reduce blocking artifacts + if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100) + // Sharpen sligthly the picture to enhance details + if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 20); // (0 
/ 0 / 100) + } + + ZeroMemory(&m_StreamData, sizeof(m_StreamData)); + m_StreamData.Enable = true; + m_StreamData.OutputIndex = m_OutputIndex; + m_StreamData.InputFrameOrField = 0; + m_StreamData.PastFrames = 0; + m_StreamData.FutureFrames = 0; + m_StreamData.ppPastSurfaces = nullptr; + m_StreamData.ppFutureSurfaces = nullptr; + m_StreamData.pInputSurface = m_InputView.Get(); + m_StreamData.ppPastSurfacesRight = nullptr; + m_StreamData.ppFutureSurfacesRight = nullptr; + m_StreamData.pInputSurfaceRight = nullptr; + + return true; +} + // This function must NOT use any DXGI or ID3D11DeviceContext methods // since it can be called on an arbitrary thread! void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) @@ -936,9 +1988,8 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) texData.SysMemPitch = newSurface->pitch; ComPtr newTexture; - hr = m_Device->CreateTexture2D(&texDesc, &texData, &newTexture); + hr = m_Device->CreateTexture2D(&texDesc, &texData, newTexture.GetAddressOf()); if (FAILED(hr)) { - SDL_FreeSurface(newSurface); SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateTexture2D() failed: %x", hr); @@ -946,9 +1997,8 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) } ComPtr newTextureResourceView; - hr = m_Device->CreateShaderResourceView((ID3D11Resource*)newTexture.Get(), nullptr, &newTextureResourceView); + hr = m_Device->CreateShaderResourceView((ID3D11Resource*)newTexture.Get(), nullptr, newTextureResourceView.GetAddressOf()); if (FAILED(hr)) { - SDL_FreeSurface(newSurface); SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateShaderResourceView() failed: %x", hr); @@ -979,12 +2029,12 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) newSurface = nullptr; VERTEX verts[] = - { - {renderRect.x, renderRect.y, 0, 1}, - {renderRect.x, renderRect.y+renderRect.h, 0, 0}, - {renderRect.x+renderRect.w, renderRect.y, 1, 1}, - {renderRect.x+renderRect.w, 
renderRect.y+renderRect.h, 1, 0}, - }; + { + {renderRect.x, renderRect.y, 0, 1}, + {renderRect.x, renderRect.y+renderRect.h, 0, 0}, + {renderRect.x+renderRect.w, renderRect.y, 1, 1}, + {renderRect.x+renderRect.w, renderRect.y+renderRect.h, 1, 0}, + }; D3D11_BUFFER_DESC vbDesc = {}; vbDesc.ByteWidth = sizeof(verts); @@ -998,7 +2048,7 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) vbData.pSysMem = verts; ComPtr newVertexBuffer; - hr = m_Device->CreateBuffer(&vbDesc, &vbData, &newVertexBuffer); + hr = m_Device->CreateBuffer(&vbDesc, &vbData, newVertexBuffer.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateBuffer() failed: %x", @@ -1016,7 +2066,6 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) { HRESULT hr; - Microsoft::WRL::ComPtr videoDevice; DXGI_ADAPTER_DESC adapterDesc; hr = adapter->GetDesc(&adapterDesc); @@ -1027,13 +2076,8 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) return false; } - // Derive a ID3D11VideoDevice from our ID3D11Device. 
- hr = m_Device.As(&videoDevice); - if (FAILED(hr)) { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "ID3D11Device::QueryInterface(ID3D11VideoDevice) failed: %x", - hr); - return false; + if(m_VideoDevice == nullptr){ + createVideoProcessor(); } // Check if the format is supported by this decoder @@ -1041,7 +2085,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) switch (m_DecoderParams.videoFormat) { case VIDEO_FORMAT_H264: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_H264_VLD_NOFGT, DXGI_FORMAT_NV12, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_H264_VLD_NOFGT, DXGI_FORMAT_NV12, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support H.264 decoding"); return false; @@ -1058,7 +2102,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) return false; case VIDEO_FORMAT_H265: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN, DXGI_FORMAT_NV12, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN, DXGI_FORMAT_NV12, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC decoding"); return false; @@ -1071,7 +2115,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) break; case VIDEO_FORMAT_H265_MAIN10: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10, DXGI_FORMAT_P010, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10, DXGI_FORMAT_P010, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC Main10 decoding"); return false; @@ -1084,7 +2128,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) break; case VIDEO_FORMAT_H265_REXT8_444: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&k_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN_444, DXGI_FORMAT_AYUV, &supported))) + if 
(FAILED(m_VideoDevice->CheckVideoDecoderFormat(&k_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN_444, DXGI_FORMAT_AYUV, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC Main 444 8-bit decoding via D3D11VA"); @@ -1098,7 +2142,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) break; case VIDEO_FORMAT_H265_REXT10_444: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&k_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10_444, DXGI_FORMAT_Y410, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&k_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10_444, DXGI_FORMAT_Y410, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC Main 444 10-bit decoding via D3D11VA"); return false; @@ -1111,7 +2155,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) break; case VIDEO_FORMAT_AV1_MAIN8: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_NV12, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_NV12, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support AV1 decoding"); return false; @@ -1124,7 +2168,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) break; case VIDEO_FORMAT_AV1_MAIN10: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_P010, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_P010, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support AV1 Main 10-bit decoding"); return false; @@ -1137,7 +2181,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) break; case VIDEO_FORMAT_AV1_HIGH8_444: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE1, DXGI_FORMAT_AYUV, &supported))) { + if 
(FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE1, DXGI_FORMAT_AYUV, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support AV1 High 444 8-bit decoding"); return false; @@ -1150,7 +2194,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) break; case VIDEO_FORMAT_AV1_HIGH10_444: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE1, DXGI_FORMAT_Y410, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE1, DXGI_FORMAT_Y410, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support AV1 High 444 10-bit decoding"); return false; @@ -1263,7 +2307,7 @@ bool D3D11VARenderer::setupRenderingResources() QByteArray vertexShaderBytecode = Path::readDataFile("d3d11_vertex.fxc"); ComPtr vertexShader; - hr = m_Device->CreateVertexShader(vertexShaderBytecode.constData(), vertexShaderBytecode.length(), nullptr, &vertexShader); + hr = m_Device->CreateVertexShader(vertexShaderBytecode.constData(), vertexShaderBytecode.length(), nullptr, vertexShader.GetAddressOf()); if (SUCCEEDED(hr)) { m_DeviceContext->VSSetShader(vertexShader.Get(), nullptr, 0); } @@ -1280,7 +2324,7 @@ bool D3D11VARenderer::setupRenderingResources() { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 8, D3D11_INPUT_PER_VERTEX_DATA, 0 }, }; ComPtr inputLayout; - hr = m_Device->CreateInputLayout(vertexDesc, ARRAYSIZE(vertexDesc), vertexShaderBytecode.constData(), vertexShaderBytecode.length(), &inputLayout); + hr = m_Device->CreateInputLayout(vertexDesc, ARRAYSIZE(vertexDesc), vertexShaderBytecode.constData(), vertexShaderBytecode.length(), inputLayout.GetAddressOf()); if (SUCCEEDED(hr)) { m_DeviceContext->IASetInputLayout(inputLayout.Get()); } @@ -1295,7 +2339,7 @@ bool D3D11VARenderer::setupRenderingResources() { QByteArray overlayPixelShaderBytecode = Path::readDataFile("d3d11_overlay_pixel.fxc"); - hr = 
m_Device->CreatePixelShader(overlayPixelShaderBytecode.constData(), overlayPixelShaderBytecode.length(), nullptr, &m_OverlayPixelShader); + hr = m_Device->CreatePixelShader(overlayPixelShaderBytecode.constData(), overlayPixelShaderBytecode.length(), nullptr, m_OverlayPixelShader.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreatePixelShader() failed: %x", @@ -1307,8 +2351,7 @@ bool D3D11VARenderer::setupRenderingResources() for (int i = 0; i < PixelShaders::_COUNT; i++) { QByteArray videoPixelShaderBytecode = Path::readDataFile(k_VideoShaderNames[i]); - - hr = m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, &m_VideoPixelShaders[i]); + hr = m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, m_VideoPixelShaders[i].GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreatePixelShader() failed: %x", @@ -1331,7 +2374,7 @@ bool D3D11VARenderer::setupRenderingResources() samplerDesc.MaxLOD = D3D11_FLOAT32_MAX; ComPtr sampler; - hr = m_Device->CreateSamplerState(&samplerDesc, &sampler); + hr = m_Device->CreateSamplerState(&samplerDesc, sampler.GetAddressOf()); if (SUCCEEDED(hr)) { m_DeviceContext->PSSetSamplers(0, 1, sampler.GetAddressOf()); } @@ -1345,8 +2388,7 @@ bool D3D11VARenderer::setupRenderingResources() // Create our render target view { - ComPtr backBufferResource; - hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Resource), (void**)&backBufferResource); + hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Resource), (void**)m_BackBufferResource.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "IDXGISwapChain::GetBuffer() failed: %x", @@ -1354,7 +2396,7 @@ bool D3D11VARenderer::setupRenderingResources() return false; } - hr = m_Device->CreateRenderTargetView(backBufferResource.Get(), nullptr, &m_RenderTargetView); + hr = 
m_Device->CreateRenderTargetView(m_BackBufferResource.Get(), nullptr, m_RenderTargetView.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateRenderTargetView() failed: %x", @@ -1379,7 +2421,7 @@ bool D3D11VARenderer::setupRenderingResources() indexBufferData.SysMemPitch = sizeof(int); ComPtr indexBuffer; - hr = m_Device->CreateBuffer(&indexBufferDesc, &indexBufferData, &indexBuffer); + hr = m_Device->CreateBuffer(&indexBufferDesc, &indexBufferData, indexBuffer.GetAddressOf()); if (SUCCEEDED(hr)) { m_DeviceContext->IASetIndexBuffer(indexBuffer.Get(), DXGI_FORMAT_R32_UINT, 0); } @@ -1395,14 +2437,20 @@ bool D3D11VARenderer::setupRenderingResources() { // Scale video to the window size while preserving aspect ratio SDL_Rect src, dst; - src.x = src.y = 0; - src.w = m_DecoderParams.width; - src.h = m_DecoderParams.height; - dst.x = dst.y = 0; - dst.w = m_DisplayWidth; - dst.h = m_DisplayHeight; - StreamUtils::scaleSourceToDestinationSurface(&src, &dst); - + if(m_AmfInitialized){ + // We use the full window for AMF as AMF keeps the picture ratio with black border around. + dst.x = dst.y = 0; + dst.w = m_DisplayWidth; + dst.h = m_DisplayHeight; + } else { + src.x = src.y = 0; + src.w = m_DecoderParams.width; + src.h = m_DecoderParams.height; + dst.x = dst.y = 0; + dst.w = m_DisplayWidth; + dst.h = m_DisplayHeight; + StreamUtils::scaleSourceToDestinationSurface(&src, &dst); + } // Convert screen space to normalized device coordinates SDL_FRect renderRect; StreamUtils::screenSpaceToNormalizedDeviceCoords(&dst, &renderRect, m_DisplayWidth, m_DisplayHeight); @@ -1413,12 +2461,12 @@ bool D3D11VARenderer::setupRenderingResources() float vMax = m_BindDecoderOutputTextures ? 
((float)m_DecoderParams.height / FFALIGN(m_DecoderParams.height, m_TextureAlignment)) : 1.0f; VERTEX verts[] = - { - {renderRect.x, renderRect.y, 0, vMax}, - {renderRect.x, renderRect.y+renderRect.h, 0, 0}, - {renderRect.x+renderRect.w, renderRect.y, uMax, vMax}, - {renderRect.x+renderRect.w, renderRect.y+renderRect.h, uMax, 0}, - }; + { + {renderRect.x, renderRect.y, 0, vMax}, + {renderRect.x, renderRect.y+renderRect.h, 0, 0}, + {renderRect.x+renderRect.w, renderRect.y, uMax, vMax}, + {renderRect.x+renderRect.w, renderRect.y+renderRect.h, uMax, 0}, + }; D3D11_BUFFER_DESC vbDesc = {}; vbDesc.ByteWidth = sizeof(verts); @@ -1431,7 +2479,7 @@ bool D3D11VARenderer::setupRenderingResources() D3D11_SUBRESOURCE_DATA vbData = {}; vbData.pSysMem = verts; - hr = m_Device->CreateBuffer(&vbDesc, &vbData, &m_VideoVertexBuffer); + hr = m_Device->CreateBuffer(&vbDesc, &vbData, m_VideoVertexBuffer.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateBuffer() failed: %x", @@ -1460,7 +2508,7 @@ bool D3D11VARenderer::setupRenderingResources() constData.pSysMem = chromaUVMax; ComPtr constantBuffer; - HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, &constantBuffer); + HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, constantBuffer.GetAddressOf()); if (SUCCEEDED(hr)) { m_DeviceContext->PSSetConstantBuffers(0, 1, constantBuffer.GetAddressOf()); } @@ -1487,7 +2535,7 @@ bool D3D11VARenderer::setupRenderingResources() blendDesc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL; ComPtr blendState; - hr = m_Device->CreateBlendState(&blendDesc, &blendState); + hr = m_Device->CreateBlendState(&blendDesc, blendState.GetAddressOf()); if (SUCCEEDED(hr)) { m_DeviceContext->OMSetBlendState(blendState.Get(), nullptr, 0xffffffff); } @@ -1531,6 +2579,45 @@ std::vector D3D11VARenderer::getVideoTextureSRVFormats() } } +/** + * \brief Set the Texture used by AMD AMF + * + * Set a YUV texture to be processed by AMD AMF 
to upscale and denoise + * + * \return bool Returns true if the texture is created + */ +bool D3D11VARenderer::setupAmfTexture() +{ + // Texture description + D3D11_TEXTURE2D_DESC texDesc = {}; + // Same size as the input Frame + texDesc.Width = m_DecoderParams.width; + texDesc.Height = m_DecoderParams.height; + texDesc.MipLevels = 1; + texDesc.ArraySize = 1; + texDesc.Format = m_TextureFormat; + texDesc.SampleDesc.Quality = 0; + texDesc.SampleDesc.Count = 1; + texDesc.Usage = D3D11_USAGE_DEFAULT; + texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET; + texDesc.CPUAccessFlags = 0; + texDesc.MiscFlags = D3D11_RESOURCE_MISC_SHARED; + HRESULT hr = m_Device->CreateTexture2D(&texDesc, nullptr, m_AmfTexture.GetAddressOf()); + if (FAILED(hr)) { + // Handle error + return false; + } + + return true; +} + +/** + * \brief Set the Texture used by the Shaders + * + * Set a YUV texture to be processed by the shaders to convert to colorisatin to RGBA + * + * \return bool Returns true if the texture is created + */ bool D3D11VARenderer::setupVideoTexture() { SDL_assert(!m_BindDecoderOutputTextures); @@ -1538,19 +2625,33 @@ bool D3D11VARenderer::setupVideoTexture() HRESULT hr; D3D11_TEXTURE2D_DESC texDesc = {}; - texDesc.Width = m_DecoderParams.width; - texDesc.Height = m_DecoderParams.height; + // Size of the output texture + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + texDesc.Width = m_DisplayWidth; + texDesc.Height = m_DisplayHeight; + } else { + texDesc.Width = m_DecoderParams.width; + texDesc.Height = m_DecoderParams.height; + } + texDesc.MipLevels = 1; texDesc.ArraySize = 1; - texDesc.Format = m_TextureFormat; + texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; texDesc.SampleDesc.Quality = 0; texDesc.SampleDesc.Count = 1; texDesc.Usage = D3D11_USAGE_DEFAULT; texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; + // The flag D3D11_BIND_RENDER_TARGET is needed to enable the use of GPU enhancement + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + texDesc.BindFlags |= D3D11_BIND_RENDER_TARGET; + } texDesc.CPUAccessFlags = 0; texDesc.MiscFlags = 0; + if(m_AmfInitialized){ + texDesc.MiscFlags |= D3D11_RESOURCE_MISC_SHARED; + } - hr = m_Device->CreateTexture2D(&texDesc, nullptr, &m_VideoTexture); + hr = m_Device->CreateTexture2D(&texDesc, nullptr, m_VideoTexture.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateTexture2D() failed: %x", @@ -1622,3 +2723,47 @@ bool D3D11VARenderer::setupTexturePoolViews(AVD3D11VAFramesContext* frameContext return true; } + +/** + * \brief Set the Texture used by the Video Processor + * + * Set a RGBA texture to be processed by the Video processor to upscale and denoise + * + * \return bool Returns true if the texture is created + */ +bool D3D11VARenderer::setupEnhancedTexture() +{ + HRESULT hr; + D3D11_TEXTURE2D_DESC texDesc = {}; + + // Size of the output texture + if(m_AmfInitialized){ + texDesc.Width = m_OutputTexture.width; + texDesc.Height = m_OutputTexture.height; + } else { + texDesc.Width = m_DecoderParams.width; + texDesc.Height = m_DecoderParams.height; + } + texDesc.MipLevels = 1; + texDesc.ArraySize = 1; + texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; + texDesc.SampleDesc.Quality = 0; + texDesc.SampleDesc.Count = 1; + texDesc.Usage = D3D11_USAGE_DEFAULT; + texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET; + texDesc.CPUAccessFlags = 0; + texDesc.MiscFlags = 0; + if(m_AmfInitialized){ + texDesc.MiscFlags |= D3D11_RESOURCE_MISC_SHARED; + } + + hr = m_Device->CreateTexture2D(&texDesc, nullptr, m_EnhancedTexture.GetAddressOf()); + if (FAILED(hr)) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "ID3D11Device::CreateTexture2D() failed: %x", + hr); + return false; + } + + return true; +} diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index f37b815ad..763b5baa5 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -4,6 +4,10 @@ #include #include +#include +#include +#include "streaming/video/videoenhancement.h" +#include "public/common/AMFFactory.h" extern "C" { #include @@ -24,6 +28,7 @@ class D3D11VARenderer : public IFFmpegRenderer virtual int getRendererAttributes() override; virtual int getDecoderCapabilities() override; virtual bool needsTestFrame() override; + virtual void setHdrMode(bool enabled) override; virtual InitFailureReason getInitFailureReason() override; enum PixelShaders { @@ -40,18 +45,33 @@ class D3D11VARenderer : public IFFmpegRenderer static void unlockContext(void* lock_ctx); bool setupRenderingResources(); + bool setupAmfTexture(); std::vector getVideoTextureSRVFormats(); bool setupVideoTexture(); // for !m_BindDecoderOutputTextures bool setupTexturePoolViews(AVD3D11VAFramesContext* frameContext); // for m_BindDecoderOutputTextures + bool setupEnhancedTexture(); void renderOverlay(Overlay::OverlayType type); void bindColorConversion(AVFrame* frame); + void prepareEnhancedOutput(AVFrame* frame); void renderVideo(AVFrame* frame); + bool createVideoProcessor(); + bool initializeVideoProcessor(); + bool 
enableAMDVideoSuperResolution(bool activate = true, bool logInfo = true); + bool enableIntelVideoSuperResolution(bool activate = true, bool logInfo = true); + bool enableNvidiaVideoSuperResolution(bool activate = true, bool logInfo = true); + bool enableAMDHDR(bool activate = true, bool logInfo = true); + bool enableIntelHDR(bool activate = true, bool logInfo = true); + bool enableNvidiaHDR(bool activate = true, bool logInfo = true); bool checkDecoderSupport(IDXGIAdapter* adapter); + int getAdapterIndexByEnhancementCapabilities(); bool createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound = nullptr); int m_DecoderSelectionPass; - int m_DevicesWithFL11Support; int m_DevicesWithCodecSupport; + int m_DevicesWithFL11Support; + + int m_AdapterIndex = 0; + int m_OutputIndex = 0; enum class SupportedFenceType { None, @@ -67,6 +87,19 @@ class D3D11VARenderer : public IFFmpegRenderer SupportedFenceType m_FenceType; SDL_mutex* m_ContextLock; bool m_BindDecoderOutputTextures; + D3D11_BOX m_SrcBox; + + Microsoft::WRL::ComPtr m_VideoDevice; + Microsoft::WRL::ComPtr m_VideoContext; + Microsoft::WRL::ComPtr m_VideoProcessor; + Microsoft::WRL::ComPtr m_VideoProcessorEnumerator; + D3D11_VIDEO_PROCESSOR_CAPS m_VideoProcessorCapabilities; + D3D11_VIDEO_PROCESSOR_STREAM m_StreamData; + Microsoft::WRL::ComPtr m_OutputView; + Microsoft::WRL::ComPtr m_InputView; + Microsoft::WRL::ComPtr m_BackBufferResource; + VideoEnhancement* m_VideoEnhancement; + bool m_AutoStreamSuperResolution = false; bool m_UseFenceHack; DECODER_PARAMETERS m_DecoderParams; @@ -83,13 +116,22 @@ class D3D11VARenderer : public IFFmpegRenderer std::array, PixelShaders::_COUNT> m_VideoPixelShaders; Microsoft::WRL::ComPtr m_VideoVertexBuffer; + Microsoft::WRL::ComPtr m_AmfTexture; // Only valid if !m_BindDecoderOutputTextures Microsoft::WRL::ComPtr m_VideoTexture; + Microsoft::WRL::ComPtr m_EnhancedTexture; // Only index 0 is valid if !m_BindDecoderOutputTextures #define DECODER_BUFFER_POOL_SIZE 17 
std::array, 2>, DECODER_BUFFER_POOL_SIZE> m_VideoTextureResourceViews; + struct { + int width; + int height; + int left; + int top; + } m_OutputTexture; + SDL_SpinLock m_OverlayLock; std::array, Overlay::OverlayMax> m_OverlayVertexBuffers; std::array, Overlay::OverlayMax> m_OverlayTextures; @@ -98,5 +140,15 @@ class D3D11VARenderer : public IFFmpegRenderer AVBufferRef* m_HwDeviceContext; AVBufferRef* m_HwFramesContext; -}; + // AMD (AMF) + amf::AMFContextPtr m_AmfContext; + amf::AMFSurfacePtr m_AmfSurface; + amf::AMFDataPtr m_AmfData; + // amf::AMFComponentPtr does not work for m_AmfUpScaler, have to use raw pointer + amf::AMFComponent* m_AmfUpScaler; + bool m_AmfInitialized = false; + bool m_AmfUpScalerSharpness = false; + amf::AMF_SURFACE_FORMAT m_AmfUpScalerSurfaceFormat; + +}; diff --git a/app/streaming/video/ffmpeg-renderers/vt_metal.mm b/app/streaming/video/ffmpeg-renderers/vt_metal.mm index 61f15a7f9..ea833e630 100644 --- a/app/streaming/video/ffmpeg-renderers/vt_metal.mm +++ b/app/streaming/video/ffmpeg-renderers/vt_metal.mm @@ -15,9 +15,11 @@ #import #import #import -#import +#import #import +#include "streaming/video/videoenhancement.h" + extern "C" { #include } @@ -137,8 +139,15 @@ m_LastDrawableHeight(-1), m_PresentationMutex(SDL_CreateMutex()), m_PresentationCond(SDL_CreateCond()), - m_PendingPresentationCount(0) + m_PendingPresentationCount(0), + m_LumaTexture(nullptr), + m_LumaUpscaledTexture(nullptr), + m_LumaUpscaler(nullptr), + m_ChromaTexture(nullptr), + m_ChromaUpscaledTexture(nullptr), + m_ChromaUpscaler(nullptr) { + m_VideoEnhancement = &VideoEnhancement::getInstance(); } virtual ~VTMetalRenderer() override @@ -191,10 +200,35 @@ [m_CommandQueue release]; } - if (m_TextureCache != nullptr) { - CFRelease(m_TextureCache); + if (m_LumaTexture != nullptr) { + [m_LumaTexture release]; + } + + if (m_LumaUpscaledTexture != nullptr) { + [m_LumaUpscaledTexture release]; + } + + if (m_LumaUpscaler != nullptr) { + [m_LumaUpscaler release]; } + if 
(m_ChromaTexture != nullptr) { + [m_ChromaTexture release]; + } + + if (m_ChromaUpscaledTexture != nullptr) { + [m_ChromaUpscaledTexture release]; + } + + if (m_ChromaUpscaler != nullptr) { + [m_ChromaUpscaler release]; + } + + // Note: CFRelease makes the application crash sometime as the m_TextureCache seems to be cleared before it is called + // if (m_TextureCache != nullptr) { + // CFRelease(m_TextureCache); + // } + if (m_MetalView != nullptr) { SDL_Metal_DestroyView(m_MetalView); } @@ -504,73 +538,152 @@ virtual void renderFrame(AVFrame* frame) override return; } - std::array cvMetalTextures; size_t planes = getFramePlaneCount(frame); SDL_assert(planes <= MAX_VIDEO_PLANES); + CVPixelBufferRef pixBuf = reinterpret_cast(frame->data[3]); if (frame->format == AV_PIX_FMT_VIDEOTOOLBOX) { - CVPixelBufferRef pixBuf = reinterpret_cast(frame->data[3]); // Create Metal textures for the planes of the CVPixelBuffer - for (size_t i = 0; i < planes; i++) { - MTLPixelFormat fmt; - - switch (CVPixelBufferGetPixelFormatType(pixBuf)) { - case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: - case kCVPixelFormatType_444YpCbCr8BiPlanarVideoRange: - case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: - case kCVPixelFormatType_444YpCbCr8BiPlanarFullRange: - fmt = (i == 0) ? MTLPixelFormatR8Unorm : MTLPixelFormatRG8Unorm; - break; - - case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange: - case kCVPixelFormatType_444YpCbCr10BiPlanarFullRange: - case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange: - case kCVPixelFormatType_444YpCbCr10BiPlanarVideoRange: - fmt = (i == 0) ? 
MTLPixelFormatR16Unorm : MTLPixelFormatRG16Unorm; - break; - - default: - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "Unknown pixel format: %x", - CVPixelBufferGetPixelFormatType(pixBuf)); - return; - } - CVReturn err = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_TextureCache, pixBuf, nullptr, fmt, - CVPixelBufferGetWidthOfPlane(pixBuf, i), - CVPixelBufferGetHeightOfPlane(pixBuf, i), - i, - &cvMetalTextures[i]); - if (err != kCVReturnSuccess) { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "CVMetalTextureCacheCreateTextureFromImage() failed: %d", - err); - return; - } + switch (CVPixelBufferGetPixelFormatType(pixBuf)) { + case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: + case kCVPixelFormatType_444YpCbCr8BiPlanarVideoRange: + case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: + case kCVPixelFormatType_444YpCbCr8BiPlanarFullRange: + case kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange: + case kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange: + m_LumaPixelFormart = MTLPixelFormatR8Unorm; + m_ChromaPixelFormart = MTLPixelFormatRG8Unorm; + break; + case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange: + case kCVPixelFormatType_444YpCbCr10BiPlanarFullRange: + case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange: + case kCVPixelFormatType_444YpCbCr10BiPlanarVideoRange: + m_LumaPixelFormart = MTLPixelFormatR16Unorm; + m_ChromaPixelFormart = MTLPixelFormatRG16Unorm; + break; + default: + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Unknown pixel format: %x", + CVPixelBufferGetPixelFormatType(pixBuf)); + return; + } + + CVReturn err; + + err = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + m_TextureCache, + pixBuf, + nullptr, + m_LumaPixelFormart, + CVPixelBufferGetWidthOfPlane(pixBuf, 0), + CVPixelBufferGetHeightOfPlane(pixBuf, 0), + 0, + &m_cvLumaTexture); + if (err != kCVReturnSuccess) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "CVMetalTextureCacheCreateTextureFromImage() failed: %d", + err); + 
return; + } + + err = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + m_TextureCache, + pixBuf, + nullptr, + m_ChromaPixelFormart, + CVPixelBufferGetWidthOfPlane(pixBuf, 1), + CVPixelBufferGetHeightOfPlane(pixBuf, 1), + 1, + &m_cvChromaTexture); + if (err != kCVReturnSuccess) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "CVMetalTextureCacheCreateTextureFromImage() failed: %d", + err); + return; } } // Prepare a render pass to render into the next drawable - auto renderPassDescriptor = [MTLRenderPassDescriptor renderPassDescriptor]; + MTLRenderPassDescriptor *renderPassDescriptor = [MTLRenderPassDescriptor renderPassDescriptor]; renderPassDescriptor.colorAttachments[0].texture = m_NextDrawable.texture; renderPassDescriptor.colorAttachments[0].loadAction = MTLLoadActionClear; renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(0.0, 0.0, 0.0, 0.0); renderPassDescriptor.colorAttachments[0].storeAction = MTLStoreActionStore; - auto commandBuffer = [m_CommandQueue commandBuffer]; - auto renderEncoder = [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor]; + + id commandBuffer = [m_CommandQueue commandBuffer]; + id renderEncoder = [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor]; + + if(frame->format == AV_PIX_FMT_VIDEOTOOLBOX && m_VideoEnhancement->isVideoEnhancementEnabled()){ + m_LumaWidth = CVPixelBufferGetWidthOfPlane(pixBuf, 0); + m_LumaHeight = CVPixelBufferGetHeightOfPlane(pixBuf, 0); + m_ChromaWidth = CVPixelBufferGetWidthOfPlane(pixBuf, 1); + m_ChromaHeight = CVPixelBufferGetHeightOfPlane(pixBuf, 1); + + // Setup the Spacial scaler for Luma texture + if(m_LumaUpscaler == nullptr){ + MTLFXSpatialScalerDescriptor* Ldescriptor = [MTLFXSpatialScalerDescriptor new]; + Ldescriptor.inputWidth = m_LumaWidth; + Ldescriptor.inputHeight = m_LumaHeight; + Ldescriptor.outputWidth = m_LastDrawableWidth; + Ldescriptor.outputHeight = m_LastDrawableHeight; + Ldescriptor.colorTextureFormat = 
m_LumaPixelFormart; + Ldescriptor.outputTextureFormat = m_LumaPixelFormart; + Ldescriptor.colorProcessingMode = MTLFXSpatialScalerColorProcessingModeLinear; // Linear has a better color rendering than Perceptual + m_LumaUpscaler = [Ldescriptor newSpatialScalerWithDevice:m_MetalLayer.device]; + + MTLTextureDescriptor *LtextureDescriptor = [[MTLTextureDescriptor alloc] init]; + LtextureDescriptor.pixelFormat = m_LumaPixelFormart; + LtextureDescriptor.width = m_LastDrawableWidth; + LtextureDescriptor.height = m_LastDrawableHeight; + LtextureDescriptor.storageMode = MTLStorageModePrivate; + LtextureDescriptor.usage = MTLTextureUsageShaderRead | MTLTextureUsageRenderTarget; + + m_LumaUpscaledTexture = [m_MetalLayer.device newTextureWithDescriptor:LtextureDescriptor]; + } + + // Setup the Spacial scaler for Chroma texture + if(m_ChromaUpscaler == nullptr){ + MTLFXSpatialScalerDescriptor* Cdescriptor = [MTLFXSpatialScalerDescriptor new]; + Cdescriptor.inputWidth = m_ChromaWidth; + Cdescriptor.inputHeight = m_ChromaHeight; + Cdescriptor.outputWidth = m_LastDrawableWidth; + Cdescriptor.outputHeight = m_LastDrawableHeight; + Cdescriptor.colorTextureFormat = m_ChromaPixelFormart; + Cdescriptor.outputTextureFormat = m_ChromaPixelFormart; + Cdescriptor.colorProcessingMode = MTLFXSpatialScalerColorProcessingModeLinear; // Linear has a better color rendering than Perceptual + m_ChromaUpscaler = [Cdescriptor newSpatialScalerWithDevice:m_MetalLayer.device]; + + MTLTextureDescriptor* CtextureDescriptor = [[MTLTextureDescriptor alloc] init]; + CtextureDescriptor.pixelFormat = m_ChromaPixelFormart; + CtextureDescriptor.width = m_LastDrawableWidth; + CtextureDescriptor.height = m_LastDrawableHeight; + CtextureDescriptor.storageMode = MTLStorageModePrivate; + CtextureDescriptor.usage = MTLTextureUsageShaderRead | MTLTextureUsageRenderTarget; + + m_ChromaUpscaledTexture = [m_MetalLayer.device newTextureWithDescriptor:CtextureDescriptor]; + } + } // Bind textures and buffers then draw the 
video region [renderEncoder setRenderPipelineState:m_VideoPipelineState]; if (frame->format == AV_PIX_FMT_VIDEOTOOLBOX) { - for (size_t i = 0; i < planes; i++) { - [renderEncoder setFragmentTexture:CVMetalTextureGetTexture(cvMetalTextures[i]) atIndex:i]; + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + // Use scaled textures + [renderEncoder setFragmentTexture:m_LumaUpscaledTexture atIndex:0]; + [renderEncoder setFragmentTexture:m_ChromaUpscaledTexture atIndex:1]; + m_LumaTexture = CVMetalTextureGetTexture(m_cvLumaTexture); + m_ChromaTexture = CVMetalTextureGetTexture(m_cvChromaTexture); + } else { + [renderEncoder setFragmentTexture:CVMetalTextureGetTexture(m_cvLumaTexture) atIndex:0]; + [renderEncoder setFragmentTexture:CVMetalTextureGetTexture(m_cvChromaTexture) atIndex:1]; } + [commandBuffer addCompletedHandler:^(id) { // Free textures after completion of rendering per CVMetalTextureCache requirements - for (size_t i = 0; i < planes; i++) { - CFRelease(cvMetalTextures[i]); - } + if(m_cvLumaTexture != nullptr) CFRelease(m_cvLumaTexture); + if(m_cvChromaTexture != nullptr) CFRelease(m_cvChromaTexture); }]; } else { @@ -642,6 +755,16 @@ virtual void renderFrame(AVFrame* frame) override }]; } + if(frame->format == AV_PIX_FMT_VIDEOTOOLBOX && m_VideoEnhancement->isVideoEnhancementEnabled()){ + m_LumaUpscaler.colorTexture = m_LumaTexture; + m_LumaUpscaler.outputTexture = m_LumaUpscaledTexture; + m_ChromaUpscaler.colorTexture = m_ChromaTexture; + m_ChromaUpscaler.outputTexture = m_ChromaUpscaledTexture; + + [m_LumaUpscaler encodeToCommandBuffer:commandBuffer]; + [m_ChromaUpscaler encodeToCommandBuffer:commandBuffer]; + } + // Flip to the newly rendered buffer [commandBuffer presentDrawable:m_NextDrawable]; [commandBuffer commit]; @@ -695,6 +818,7 @@ virtual bool initialize(PDECODER_PARAMETERS params) override int err; m_Window = params->window; + m_DecoderParams = *params; id device = getMetalDevice(); if (!device) { @@ -709,6 +833,19 @@ virtual bool 
initialize(PDECODER_PARAMETERS params) override return false; } + if (@available(macOS 13.0, *)) { + // Video Super Resolution from MetalFX is available starting from MacOS 13+ + m_VideoEnhancement->setVSRcapable(true); + m_VideoEnhancement->setHDRcapable(false); + // Enable the visibility of Video enhancement feature in the settings of the User interface + m_VideoEnhancement->enableUIvisible(); + } + + if(m_VideoEnhancement->isEnhancementCapable()){ + // Check if the user has enabled Video enhancement + m_VideoEnhancement->enableVideoEnhancement(m_DecoderParams.enableVideoEnhancement); + } + err = av_hwdevice_ctx_create(&m_HwContext, AV_HWDEVICE_TYPE_VIDEOTOOLBOX, nullptr, @@ -775,6 +912,7 @@ virtual bool initialize(PDECODER_PARAMETERS params) override // Create a command queue for submission m_CommandQueue = [m_MetalLayer.device newCommandQueue]; + return true; }} @@ -939,6 +1077,23 @@ bool notifyWindowChanged(PWINDOW_STATE_CHANGE_INFO info) override SDL_mutex* m_PresentationMutex; SDL_cond* m_PresentationCond; int m_PendingPresentationCount; + + VideoEnhancement* m_VideoEnhancement; + DECODER_PARAMETERS m_DecoderParams; + id m_LumaTexture; + id m_LumaUpscaledTexture; + id m_LumaUpscaler; + id m_ChromaTexture; + id m_ChromaUpscaledTexture; + id m_ChromaUpscaler; + size_t m_LumaWidth; + size_t m_LumaHeight; + size_t m_ChromaWidth; + size_t m_ChromaHeight; + MTLPixelFormat m_LumaPixelFormart; + MTLPixelFormat m_ChromaPixelFormart; + CVMetalTextureRef m_cvLumaTexture; + CVMetalTextureRef m_cvChromaTexture; }; IFFmpegRenderer* VTMetalRendererFactory::createRenderer(bool hwAccel) { diff --git a/app/streaming/video/ffmpeg.cpp b/app/streaming/video/ffmpeg.cpp index c17487e90..0b69a5de7 100644 --- a/app/streaming/video/ffmpeg.cpp +++ b/app/streaming/video/ffmpeg.cpp @@ -234,7 +234,8 @@ FFmpegVideoDecoder::FFmpegVideoDecoder(bool testOnly) m_VideoFormat(0), m_NeedsSpsFixup(false), m_TestOnly(testOnly), - m_DecoderThread(nullptr) + m_DecoderThread(nullptr), + 
m_VideoEnhancement(&VideoEnhancement::getInstance()) { SDL_zero(m_ActiveWndVideoStats); SDL_zero(m_LastWndVideoStats); @@ -784,15 +785,22 @@ void FFmpegVideoDecoder::stringifyVideoStats(VIDEO_STATS& stats, char* output, i break; } + // Display if AI-Enhancement is enabled + const char* aiEnhanced = ""; + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + aiEnhanced = "AI-Enhanced"; + } + if (stats.receivedFps > 0) { if (m_VideoDecoderCtx != nullptr) { ret = snprintf(&output[offset], length - offset, - "Video stream: %dx%d %.2f FPS (Codec: %s)\n", + "Video stream: %dx%d %.2f FPS (Codec: %s) %s\n", m_VideoDecoderCtx->width, m_VideoDecoderCtx->height, stats.totalFps, - codecString); + codecString, + aiEnhanced); if (ret < 0 || ret >= length - offset) { SDL_assert(false); return; diff --git a/app/streaming/video/ffmpeg.h b/app/streaming/video/ffmpeg.h index 7dbcd503f..a4fb2f0e3 100644 --- a/app/streaming/video/ffmpeg.h +++ b/app/streaming/video/ffmpeg.h @@ -6,6 +6,7 @@ #include "decoder.h" #include "ffmpeg-renderers/renderer.h" #include "ffmpeg-renderers/pacer/pacer.h" +#include "streaming/video/videoenhancement.h" extern "C" { #include @@ -110,6 +111,7 @@ class FFmpegVideoDecoder : public IVideoDecoder { bool m_TestOnly; SDL_Thread* m_DecoderThread; SDL_atomic_t m_DecoderThreadShouldQuit; + VideoEnhancement* m_VideoEnhancement; // Data buffers in the queued DU are not valid QQueue m_FrameInfoQueue; diff --git a/app/streaming/video/videoenhancement.cpp b/app/streaming/video/videoenhancement.cpp new file mode 100644 index 000000000..f7bad2376 --- /dev/null +++ b/app/streaming/video/videoenhancement.cpp @@ -0,0 +1,216 @@ +#include "videoenhancement.h" + +/** + * \brief Constructor (Singleton) + * + * VideoEnhancement does not set its properties automatically at instance initiation, + * it depends on D3D11va. Therefore, it needs to be populated at the initialization of + * the rendered D3D11VARenderer::initialize(). 
+ * + * \return void + */ +VideoEnhancement::VideoEnhancement(){} + +/** + * \brief Get the singleton instance + * + * Returns the instance of the singleton + * + * \return VideoEnhancement instance + */ +VideoEnhancement &VideoEnhancement::getInstance(){ + static VideoEnhancement instance; + return instance; +} + +/** + * \brief Set the Adapter Index + * + * \return void + */ +void VideoEnhancement::setAdapterIndex(int adapterIndex){ + if (adapterIndex > 0){ + m_AdapterIndex = adapterIndex; + } +} + +/** + * \brief Get the Adapter Index + * + * \return int Returns the Index of the most capable adapter for Video enhancement + */ +int VideoEnhancement::getAdapterIndex(){ + return m_AdapterIndex; +} + +/** + * \brief Set Vendor ID + * + * \return void + */ +void VideoEnhancement::setVendorID(int vendorId){ + m_VendorId = vendorId; +} + +/** + * \brief Check if the vendor is AMD + * + * \return bool Returns true if the vendor is AMD + */ +bool VideoEnhancement::isVendorAMD(){ + return m_VendorId == VENDOR_ID_AMD; +} + +/** + * \brief Check if the vendor is AMD + * + * \param int vendorId Vendor ID + * \return bool Returns true if the vendor is AMD + */ +bool VideoEnhancement::isVendorAMD(int vendorId){ + return vendorId == VENDOR_ID_AMD; +} + +/** + * \brief Check if the vendor is Intel + * + * \return bool Returns true if the vendor is Intel + */ +bool VideoEnhancement::isVendorIntel(){ + return m_VendorId == VENDOR_ID_INTEL; +} + +/** + * \brief Check if the vendor is Intel + * + * \param int vendorId Vendor ID + * \return bool Returns true if the vendor is Intel + */ +bool VideoEnhancement::isVendorIntel(int vendorId){ + return vendorId == VENDOR_ID_INTEL; +} + +/** + * \brief Check if the vendor is NVIDIA + * + * \return bool Returns true if the vendor is NVIDIA + */ +bool VideoEnhancement::isVendorNVIDIA(){ + return m_VendorId == VENDOR_ID_NVIDIA; +} + +/** + * \brief Check if the vendor is NVIDIA + * + * \param int vendorId Vendor ID + * \return bool Returns true 
if the vendor is NVIDIA + */ +bool VideoEnhancement::isVendorNVIDIA(int vendorId){ + return vendorId == VENDOR_ID_NVIDIA; +} + +/** + * \brief Set the Video Super-Resolution capability + * + * Keep track if the adapter is capable of Video Super-Resolution + * + * \return void + */ +void VideoEnhancement::setVSRcapable(bool capable){ + m_VSRcapable = capable; +} + +/** + * \brief Check the Video Super-Resolution capability + * + * Check if the GPU used is capable of providing VSR feature + * + * \return bool Returns true if the VSR feature is available + */ +bool VideoEnhancement::isVSRcapable(){ + return m_VSRcapable; +} + +/** + * \brief Set the HDR capability + * + * Keep track if the adapter is capable of SDR to HDR + * + * \return void + */ +void VideoEnhancement::setHDRcapable(bool capable){ + m_HDRcapable = capable; +} + +/** + * \brief Check the HDR capability + * + * Check if the GPU used is capable of providing SDR to HDR feature + * + * \return bool Returns true if the HDR feature is available + */ +bool VideoEnhancement::isHDRcapable(){ + return m_HDRcapable; +} + +/** + * \brief Check the AI-Enhancement capability + * + * Check if the GPU used is capable of enhancing the video + * + * \return bool Returns true if such capability is available + */ +bool VideoEnhancement::isEnhancementCapable(){ + return m_VSRcapable || m_HDRcapable; +} + +/** + * \brief Check if Video Enhancement feature is enabled + * + * \return bool Returns true if the Video Enhancement feature is enabled + */ +bool VideoEnhancement::isVideoEnhancementEnabled(){ + return m_Enabled; +} + +/** + * \brief Enable Video Enhancement feature + * + * \param bool activate Default is true, at true it enables the use of Video Enhancement feature + * \return bool Returns true if the Video Enhancement feature is available + */ +bool VideoEnhancement::enableVideoEnhancement(bool activate){ + m_Enabled = isEnhancementCapable() && activate; + return m_Enabled; +} + +/** + * \brief Enable Video 
Enhancement accessibility from the settings interface + * + * \param bool visible Default is true, at true it displays Video Enhancement feature + * \return void + */ +void VideoEnhancement::enableUIvisible(bool visible){ + m_UIvisible = visible; +} + +/** + * \brief Check if Video Enhancement feature is accessible from the settings interface + * + * \return bool Returns true if the Video Enhancement feature is accessible + */ +bool VideoEnhancement::isUIvisible(){ + return m_UIvisible; +} + +/** + * \brief Check if Video Enhancement feature is experimental from the vendor + * + * \return bool Returns true if the Video Enhancement feature is experimental + */ +bool VideoEnhancement::isExperimental(){ + // Only Intel is experimental, NVIDIA and AMD are official + // [ToDo] If Intel officially release the feature, we can return false or just delete + // this method and the QML logic associated. + return isVendorIntel(); +} diff --git a/app/streaming/video/videoenhancement.h b/app/streaming/video/videoenhancement.h new file mode 100644 index 000000000..e3e1f3755 --- /dev/null +++ b/app/streaming/video/videoenhancement.h @@ -0,0 +1,58 @@ +#ifndef VIDEOENHANCEMENT_H +#define VIDEOENHANCEMENT_H + +#pragma once + +class VideoEnhancement +{ + +private: + + static VideoEnhancement* instance; + + bool m_Enabled = false; + bool m_UIvisible = false; + bool m_VSRcapable = false; + bool m_HDRcapable = false; + + // Vendors' name (PCI Special Interest Group) + const int VENDOR_ID_AMD = 0x1002; + const int VENDOR_ID_INTEL = 0x8086; + const int VENDOR_ID_NVIDIA = 0x10DE; + + // GPU information + int m_VendorId = 0; + int m_AdapterIndex = -1; + + // Disable the constructor from outside to avoid a new instance + VideoEnhancement(); + + // Private copy constructor and assignment operator to prevent duplication + VideoEnhancement(const VideoEnhancement&); + VideoEnhancement& operator=(const VideoEnhancement&); + +public: + static VideoEnhancement& getInstance(); + void setVendorID(int 
vendorId); + bool isVendorAMD(); + bool isVendorAMD(int vendorId); + bool isVendorIntel(); + bool isVendorIntel(int vendorId); + bool isVendorNVIDIA(); + bool isVendorNVIDIA(int vendorId); + bool isEnhancementCapable(); + void setVSRcapable(bool capable); + bool isVSRcapable(); + void setHDRcapable(bool capable); + bool isHDRcapable(); + bool isVideoEnhancementEnabled(); + bool enableVideoEnhancement(bool activate = true); + void enableUIvisible(bool visible = true); + void setAdapterIndex(int adapterIndex); + int getAdapterIndex(); + bool isUIvisible(); + bool isExperimental(); + +}; + +#endif // VIDEOENHANCEMENT_H diff --git a/h264bitstream/h264bitstream b/h264bitstream/h264bitstream index 34f3c58af..70124d305 160000 --- a/h264bitstream/h264bitstream +++ b/h264bitstream/h264bitstream @@ -1 +1 @@ -Subproject commit 34f3c58afa3c47b6cf0a49308a68cbf89c5e0bff +Subproject commit 70124d3051ba45e6b326264f0b25e6f48a7479e7 diff --git a/third-party/AMF b/third-party/AMF new file mode 160000 index 000000000..85eea8d43 --- /dev/null +++ b/third-party/AMF @@ -0,0 +1 @@ +Subproject commit 85eea8d43511967dcf98f063d3d3efa573536ae3