#include "DxgiCapturer.h"
#include "basic/basic.h"
#include "basic/frame.h"
#include "d3d/buffer_filler.h"
#include "d3d/convert.h"
#include "d3d/gen_frame.h"
#include "../finder.h"
// NOTE(review): the original system-include names were lost during extraction
// (five bare `#include` tokens). Reconstructed from usage below
// (D3D11/DXGI APIs, HDC/GetCursorInfo, std::mutex, qDebug) — confirm.
#include <d3d11.h>
#include <dxgi1_2.h>
#include <windows.h>
#include <mutex>
#include <QDebug>

namespace avrecorder {
namespace video {

// --- DxgiCapturerPrivate implementation ---

// Opens a DXGI desktop-duplication session for the monitor whose desktop
// coordinates start at (left, top) and allocates the NV12 / BGR0 output
// frames at width x height.
//
// Returns false on any failure; every partially created resource is released
// before returning so a failed Open leaves the object in a clean, re-openable
// state. Thread-safe via _deviceMutex.
bool DxgiCapturerPrivate::Open(int left, int top, int width, int height)
{
    std::lock_guard<std::mutex> lock(_deviceMutex);
    CloseInternal(); // internal variant: Close() would re-lock _deviceMutex and deadlock

    HRESULT hr = S_OK;
    _isAttached = false;
    // Reset the init flag up front so re-initialization is allowed even if we
    // bail out below.
    _bInit = false;

    // Driver types supported, tried in order of preference.
    D3D_DRIVER_TYPE DriverTypes[] = {
        D3D_DRIVER_TYPE_HARDWARE,
        D3D_DRIVER_TYPE_WARP,
        D3D_DRIVER_TYPE_REFERENCE,
    };
    UINT NumDriverTypes = ARRAYSIZE(DriverTypes);

    // Feature levels supported.
    D3D_FEATURE_LEVEL FeatureLevels[] = {
        D3D_FEATURE_LEVEL_11_0,
        D3D_FEATURE_LEVEL_10_1,
        D3D_FEATURE_LEVEL_10_0,
        D3D_FEATURE_LEVEL_9_1};
    UINT NumFeatureLevels = ARRAYSIZE(FeatureLevels);
    D3D_FEATURE_LEVEL FeatureLevel;

    // Create the D3D device with the first driver type that succeeds.
    for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex) {
        hr = D3D11CreateDevice(nullptr, DriverTypes[DriverTypeIndex], nullptr, 0,
                               FeatureLevels, NumFeatureLevels, D3D11_SDK_VERSION,
                               &_hDevice, &FeatureLevel, &_hContext);
        if (SUCCEEDED(hr)) {
            break;
        }
    }
    if (FAILED(hr)) {
        __DebugPrint("D3D11CreateDevice failed: 0x%08lx", (unsigned long)hr);
        return false;
    }

    // Get the DXGI device behind the D3D11 device.
    IDXGIDevice* hDxgiDevice = nullptr;
    hr = _hDevice->QueryInterface(__uuidof(IDXGIDevice),
                                  reinterpret_cast<void**>(&hDxgiDevice));
    if (FAILED(hr)) {
        __DebugPrint("QueryInterface IDXGIDevice failed: 0x%08lx", (unsigned long)hr);
        Free(_hContext, [this] { _hContext->Release(); });
        Free(_hDevice, [this] { _hDevice->Release(); });
        return false;
    }

    // Get the DXGI adapter; the temporary DXGI device is no longer needed.
    IDXGIAdapter* hDxgiAdapter = nullptr;
    hr = hDxgiDevice->GetParent(__uuidof(IDXGIAdapter),
                                reinterpret_cast<void**>(&hDxgiAdapter));
    Free(hDxgiDevice, [=] { hDxgiDevice->Release(); });
    if (FAILED(hr)) {
        __DebugPrint("GetParent IDXGIAdapter failed: 0x%08lx", (unsigned long)hr);
        Free(_hContext, [this] { _hContext->Release(); });
        Free(_hDevice, [this] { _hDevice->Release(); });
        return false;
    }

    // Find the output (monitor) whose desktop rect starts at (left, top).
    IDXGIOutput* hDxgiOutput = nullptr;
    DXGI_OUTPUT_DESC dxgiOutDesc;
    ZeroMemory(&dxgiOutDesc, sizeof(dxgiOutDesc));
    for (int idx = 0; SUCCEEDED(hr = hDxgiAdapter->EnumOutputs(idx, &hDxgiOutput)); ++idx) {
        hDxgiOutput->GetDesc(&dxgiOutDesc);
        if (dxgiOutDesc.DesktopCoordinates.left == left &&
            dxgiOutDesc.DesktopCoordinates.top == top) {
            break;
        }
        // BUGFIX: release every non-matching output; the original leaked one
        // IDXGIOutput reference per skipped monitor on each Open().
        hDxgiOutput->Release();
        hDxgiOutput = nullptr;
    }
    Free(hDxgiAdapter, [=] { hDxgiAdapter->Release(); });
    if (FAILED(hr)) {
        // Loop ended without a match (EnumOutputs returned DXGI_ERROR_NOT_FOUND).
        __DebugPrint("EnumOutputs failed: 0x%08lx", (unsigned long)hr);
        Free(hDxgiOutput, [=] { hDxgiOutput->Release(); });
        Free(_hContext, [this] { _hContext->Release(); });
        Free(_hDevice, [this] { _hDevice->Release(); });
        return false;
    }

    // QI for IDXGIOutput1, which exposes DuplicateOutput.
    IDXGIOutput1* hDxgiOutput1 = nullptr;
    hr = hDxgiOutput->QueryInterface(__uuidof(hDxgiOutput1),
                                     reinterpret_cast<void**>(&hDxgiOutput1));
    Free(hDxgiOutput, [=] { hDxgiOutput->Release(); });
    if (FAILED(hr)) {
        __DebugPrint("QueryInterface IDXGIOutput1 failed: 0x%08lx", (unsigned long)hr);
        Free(_hContext, [this] { _hContext->Release(); });
        Free(_hDevice, [this] { _hDevice->Release(); });
        return false;
    }

    // Create the desktop-duplication interface.
    hr = hDxgiOutput1->DuplicateOutput(_hDevice, &_hDeskDupl);
    Free(hDxgiOutput1, [=] { hDxgiOutput1->Release(); });
    if (FAILED(hr)) {
        __DebugPrint("DuplicateOutput failed: 0x%08lx", (unsigned long)hr);
        Free(_hContext, [this] { _hContext->Release(); });
        Free(_hDevice, [this] { _hDevice->Release(); });
        return false;
    }

    // Color-space setup for the RGB -> NV12 video-processor conversion.
    D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace;
    inputColorSpace.Usage = 0;         // 0 = playback (suits screen capture)
    inputColorSpace.RGB_Range = 0;     // 0 = full range (0-255) for the RGB input
    inputColorSpace.YCbCr_Matrix = 0;  // ignored for RGB input; 0 = BT.601
    inputColorSpace.YCbCr_xvYCC = 0;   // 0 = conventional YCbCr
    inputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255; // full range for RGB

    D3D11_VIDEO_PROCESSOR_COLOR_SPACE outputColorSpace;
    outputColorSpace.Usage = 0;        // 0 = playback
    outputColorSpace.RGB_Range = 0;    // ignored for YUV output
    outputColorSpace.YCbCr_Matrix = 0; // 0 = BT.601 (SDTV)
    outputColorSpace.YCbCr_xvYCC = 0;  // 0 = conventional YCbCr
    outputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235; // studio range for YUV output

    qDebug() << "DxgiCapturer::Open: Color space settings - Input: Usage=" << inputColorSpace.Usage
             << ", RGB_Range=" << inputColorSpace.RGB_Range
             << ", Nominal_Range=" << inputColorSpace.Nominal_Range
             << "; Output: Usage=" << outputColorSpace.Usage
             << ", YCbCr_Matrix=" << outputColorSpace.YCbCr_Matrix
             << ", Nominal_Range=" << outputColorSpace.Nominal_Range;

    // Initialize the GPU RGB -> NV12 converter.
    qDebug() << "DxgiCapturer::Open: Initializing RGB to NV12 converter";
    if (FAILED(_rgbToNv12.Open(_hDevice, _hContext, inputColorSpace, outputColorSpace))) {
        qDebug() << "DxgiCapturer::Open: RGB to NV12 converter initialization failed";
        Free(_hDeskDupl, [this] { _hDeskDupl->Release(); });
        Free(_hContext, [this] { _hContext->Release(); });
        Free(_hDevice, [this] { _hDevice->Release(); });
        return false;
    }
    qDebug() << "DxgiCapturer::Open: RGB to NV12 converter initialized successfully";

    // Allocate the reusable output frames (NV12 preferred, BGR0 fallback).
    _nv12Frame = Frame::Alloc(AV_PIX_FMT_NV12, width, height);
    _xrgbFrame = Frame::Alloc(AV_PIX_FMT_BGR0, width, height);
    if (!_nv12Frame) {
        __DebugPrint("Alloc NV12 frame failed");
        // BUGFIX: close the converter too; CloseInternal() skips cleanup when
        // _bInit is false, so leaving it open here leaked it.
        _rgbToNv12.Close();
        Free(_hDeskDupl, [this] { _hDeskDupl->Release(); });
        Free(_hContext, [this] { _hContext->Release(); });
        Free(_hDevice, [this] { _hDevice->Release(); });
        return false;
    }
    if (!_xrgbFrame) {
        __DebugPrint("Alloc XRGB frame failed");
        _rgbToNv12.Close(); // see BUGFIX note above
        Free(_nv12Frame, [this] { av_frame_free(&_nv12Frame); });
        Free(_hDeskDupl, [this] { _hDeskDupl->Release(); });
        Free(_hContext, [this] { _hContext->Release(); });
        Free(_hDevice, [this] { _hDevice->Release(); });
        return false;
    }

    _bInit = true;
    return true;
}

// Captures one desktop frame and returns it as an AVFrame owned by this
// object (NV12 preferred, BGR0 fallback), or nullptr when no new frame is
// available or capture failed. Optionally composites the mouse cursor.
AVFrame* DxgiCapturerPrivate::GetFrame(
    bool shouldDrawCursor, int left, int top, int right, int bottom)
{
std::lock_guard lock(_deviceMutex); if (!_bInit) return nullptr; _isCaptureSuccess = false; IDXGIResource* hDesktopResource = nullptr; DXGI_OUTDUPL_FRAME_INFO FrameInfo; HRESULT hr = _hDeskDupl->AcquireNextFrame(0, &FrameInfo, &hDesktopResource); if (FAILED(hr)) { if (hr == DXGI_ERROR_WAIT_TIMEOUT) return nullptr; return nullptr; } // query next frame staging buffer ID3D11Texture2D* srcImage = nullptr; hr = hDesktopResource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast(&srcImage)); Free(hDesktopResource, [=] { hDesktopResource->Release(); }); if (FAILED(hr)) { __DebugPrint("QueryInterface ID3D11Texture2D failed: 0x%08lx", (unsigned long)hr); _hDeskDupl->ReleaseFrame(); return nullptr; } srcImage->GetDesc(&_desc); // create a new staging buffer for fill frame image auto desc = _desc; desc.ArraySize = 1; desc.BindFlags = D3D11_BIND_FLAG::D3D11_BIND_RENDER_TARGET; desc.MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE; desc.SampleDesc.Count = 1; desc.SampleDesc.Quality = 0; desc.MipLevels = 1; desc.CPUAccessFlags = 0; desc.Usage = D3D11_USAGE_DEFAULT; hr = _hDevice->CreateTexture2D(&desc, nullptr, &_gdiImage); if (FAILED(hr)) { __DebugPrint("Create _gdiImage failed"); Free(srcImage, [=] { srcImage->Release(); }); _hDeskDupl->ReleaseFrame(); return nullptr; } // copy next staging buffer to new staging buffer _hContext->CopyResource(_gdiImage, srcImage); Free(srcImage, [=] { srcImage->Release(); }); _hDeskDupl->ReleaseFrame(); // create staging buffer for map bits _hStagingSurf = nullptr; hr = _gdiImage->QueryInterface(__uuidof(IDXGISurface), (void**) (&_hStagingSurf)); if (FAILED(hr)) { __DebugPrint("_gdiImage->QueryInterface failed"); Free(_gdiImage, [this] { _gdiImage->Release(); }); return nullptr; } _isCaptureSuccess = true; HDC hdc = nullptr; _hStagingSurf->GetDC(FALSE, &hdc); // 合成鼠标指针 if (hdc && shouldDrawCursor) { drawCursor(hdc, left, top, right, bottom); } // 释放 DC 并转换为 AVFrame if (_isCaptureSuccess) { _isCaptureSuccess = false; 
_hStagingSurf->ReleaseDC(nullptr); // 创建一个临时的纹理 ID3D11Texture2D* tmpImage = nullptr; _desc.MiscFlags = 2050; hr = _hDevice->CreateTexture2D(&_desc, nullptr, &tmpImage); if (FAILED(hr)) { __DebugPrint("CreateTexture2D tmpImage failed: 0x%08lx", (unsigned long)hr); Free(_hStagingSurf, [this] { _hStagingSurf->Release(); }); Free(_gdiImage, [this] { _gdiImage->Release(); }); return nullptr; } _hContext->CopyResource(tmpImage, _gdiImage); // 首先尝试创建 NV12 纹理,如果失败则重试一次 AVFrame* frame = nullptr; auto tmpFormat = _desc.Format; _desc.Format = DXGI_FORMAT_NV12; bool nv12Success = GenNv12Frame(_hDevice, _hContext, _desc, tmpImage, _nv12Buffers, _nv12Frame, _rgbToNv12); if (!nv12Success) { qDebug() << "DxgiCapturer::GetFrame: First NV12 conversion failed, retrying..."; // 重试一次NV12转换 nv12Success = GenNv12Frame(_hDevice, _hContext, _desc, tmpImage, _nv12Buffers, _nv12Frame, _rgbToNv12); } if (nv12Success) { frame = _nv12Frame; qDebug() << "DxgiCapturer::GetFrame: NV12 conversion successful"; } else { qDebug() << "DxgiCapturer::GetFrame: NV12 conversion failed, falling back to RGB"; _desc.Format = tmpFormat; GenRgbFrame(_hDevice, _hContext, _desc, _gdiImage, _xrgbBuffers, _xrgbFrame); frame = _xrgbFrame; } Free(_hStagingSurf, [this] { _hStagingSurf->Release(); }); Free(tmpImage, [&tmpImage] { tmpImage->Release(); }); Free(_gdiImage, [this] { _gdiImage->Release(); }); return frame; } return nullptr; } void DxgiCapturerPrivate::drawCursor(HDC hdc, int left, int top, int right, int bottom) { CURSORINFO ci; ci.cbSize = sizeof(CURSORINFO); if (!GetCursorInfo(&ci)) { __DebugPrint("GetCursorInfo failed"); return; } int cursorX = ci.ptScreenPos.x; int cursorY = ci.ptScreenPos.y; if (cursorX > right || cursorX < left || cursorY > bottom || cursorY < top) { return; // 超出显示范围 } if (ci.flags == CURSOR_SHOWING) { // 将光标画到屏幕所在位置 int x = cursorX - left; int y = cursorY - top; DrawIconEx(hdc, x, y, ci.hCursor, 0, 0, 0, NULL, DI_NORMAL | DI_COMPAT); } } // --- DxgiCapturer 实现 --- 
DxgiCapturer::DxgiCapturer() : d(new DxgiCapturerPrivate) {} DxgiCapturer::~DxgiCapturer() { close(); delete d; } bool DxgiCapturer::open(const CaptureTarget& target, int width, int height) { #ifdef PLATFORM_WINDOWS close(); if (target.type != CaptureTargetType::Monitor) return false; auto monitors = MonitorFinder::GetList(); if (target.monitorIdx < 0 || target.monitorIdx >= (int)monitors.size()) return false; auto& monitorInfo = monitors[target.monitorIdx]; m_left = monitorInfo.rect.left; m_top = monitorInfo.rect.top; m_width = monitorInfo.rect.right - monitorInfo.rect.left; m_height = monitorInfo.rect.bottom - monitorInfo.rect.top; m_right = monitorInfo.rect.right; m_bottom = monitorInfo.rect.bottom; return d->Open(m_left, m_top, m_width, m_height); #else return false; #endif } void DxgiCapturerPrivate::CloseInternal() { if (!_bInit) return; qDebug() << "DxgiCapturerPrivate::CloseInternal: Starting cleanup"; _bInit = false; // 清理缓冲区 qDebug() << "DxgiCapturerPrivate::CloseInternal: Clearing buffers"; _nv12Buffers.Clear(); _xrgbBuffers.Clear(); // 释放D3D转换器 qDebug() << "DxgiCapturerPrivate::CloseInternal: Closing D3D converter"; _rgbToNv12.Close(); // 释放帧 qDebug() << "DxgiCapturerPrivate::CloseInternal: Freeing frames"; Free(_nv12Frame, [this] { av_frame_free(&_nv12Frame); }); Free(_xrgbFrame, [this] { av_frame_free(&_xrgbFrame); }); // 释放DXGI资源 qDebug() << "DxgiCapturerPrivate::CloseInternal: Releasing DXGI resources"; Free(_hDeskDupl, [this] { _hDeskDupl->Release(); }); // 最后释放D3D设备和上下文 qDebug() << "DxgiCapturerPrivate::CloseInternal: Releasing D3D resources"; Free(_hContext, [this] { _hContext->Release(); }); Free(_hDevice, [this] { _hDevice->Release(); }); qDebug() << "DxgiCapturerPrivate::CloseInternal: Cleanup completed"; } void DxgiCapturerPrivate::Close() { std::lock_guard lock(_deviceMutex); CloseInternal(); } void DxgiCapturer::close() { #ifdef PLATFORM_WINDOWS d->Close(); #endif } AVFrame* DxgiCapturer::getFrame() { #ifdef PLATFORM_WINDOWS return 
d->GetFrame(m_drawCursor, m_left, m_top, m_right, m_bottom); #else return nullptr; #endif } } // namespace video } // namespace avrecorder