|
|
@@ -1,20 +1,213 @@
|
|
|
#include "DxgiCapturer.h"
|
|
|
+
|
|
|
+#include "basic/basic.h"
|
|
|
+#include "basic/frame.h"
|
|
|
+#include "d3d/buffer_filler.h"
|
|
|
+#include "d3d/convert.h"
|
|
|
+#include "d3d/gen_frame.h"
|
|
|
+#include "../finder.h"
|
|
|
+
|
|
|
#include <cassert>
|
|
|
+#include <d3d11.h>
|
|
|
+#include <dxgi1_2.h>
|
|
|
|
|
|
namespace avrecorder {
|
|
|
namespace video {
|
|
|
|
|
|
-DxgiCapturer::DxgiCapturer() : d(nullptr) {}
|
|
|
-DxgiCapturer::~DxgiCapturer() { close(); }
|
|
|
+// --- DxgiCapturerPrivate 实现 ---
|
|
|
+bool DxgiCapturerPrivate::Open(int left, int top, int width, int height) {
|
|
|
+ Close();
|
|
|
+ HRESULT hr = S_OK;
|
|
|
+ _isAttached = false;
|
|
|
+ if (_bInit) return false;
|
|
|
+ // Driver types supported
|
|
|
+ D3D_DRIVER_TYPE DriverTypes[] = {
|
|
|
+ D3D_DRIVER_TYPE_HARDWARE,
|
|
|
+ D3D_DRIVER_TYPE_WARP,
|
|
|
+ D3D_DRIVER_TYPE_REFERENCE,
|
|
|
+ };
|
|
|
+ UINT NumDriverTypes = ARRAYSIZE(DriverTypes);
|
|
|
+ // Feature levels supported
|
|
|
+ D3D_FEATURE_LEVEL FeatureLevels[] = {
|
|
|
+ D3D_FEATURE_LEVEL_11_0,
|
|
|
+ D3D_FEATURE_LEVEL_10_1,
|
|
|
+ D3D_FEATURE_LEVEL_10_0,
|
|
|
+ D3D_FEATURE_LEVEL_9_1};
|
|
|
+ UINT NumFeatureLevels = ARRAYSIZE(FeatureLevels);
|
|
|
+ D3D_FEATURE_LEVEL FeatureLevel;
|
|
|
+ // Create D3D device
|
|
|
+ for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex) {
|
|
|
+ hr = D3D11CreateDevice(nullptr,
|
|
|
+ DriverTypes[DriverTypeIndex],
|
|
|
+ nullptr,
|
|
|
+ 0,
|
|
|
+ FeatureLevels,
|
|
|
+ NumFeatureLevels,
|
|
|
+ D3D11_SDK_VERSION,
|
|
|
+ &_hDevice,
|
|
|
+ &FeatureLevel,
|
|
|
+ &_hContext);
|
|
|
+ if (SUCCEEDED(hr)) {
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ __CheckBool(SUCCEEDED(hr));
|
|
|
+ // Get DXGI device
|
|
|
+ IDXGIDevice* hDxgiDevice = nullptr;
|
|
|
+ __CheckBool(SUCCEEDED(_hDevice->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&hDxgiDevice))));
|
|
|
+ // Get DXGI adapter
|
|
|
+ IDXGIAdapter* hDxgiAdapter = nullptr;
|
|
|
+ hr = hDxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&hDxgiAdapter));
|
|
|
+ Free(hDxgiDevice, [=] { hDxgiDevice->Release(); });
|
|
|
+ __CheckBool(SUCCEEDED(hr));
|
|
|
+ // Get output
|
|
|
+ IDXGIOutput* hDxgiOutput = nullptr;
|
|
|
+ DXGI_OUTPUT_DESC dxgiOutDesc;
|
|
|
+ ZeroMemory(&dxgiOutDesc, sizeof(dxgiOutDesc));
|
|
|
+ for (int idx = 0; SUCCEEDED(hr = hDxgiAdapter->EnumOutputs(idx, &hDxgiOutput)); ++idx) {
|
|
|
+ hDxgiOutput->GetDesc(&dxgiOutDesc);
|
|
|
+ if (dxgiOutDesc.DesktopCoordinates.left == left && dxgiOutDesc.DesktopCoordinates.top == top) {
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ Free(hDxgiAdapter, [=] { hDxgiAdapter->Release(); });
|
|
|
+ __CheckBool(SUCCEEDED(hr));
|
|
|
+ // QI for Output 1
|
|
|
+ IDXGIOutput1* hDxgiOutput1 = nullptr;
|
|
|
+ hr = hDxgiOutput->QueryInterface(__uuidof(hDxgiOutput1), reinterpret_cast<void**>(&hDxgiOutput1));
|
|
|
+ Free(hDxgiOutput, [=] { hDxgiOutput->Release(); });
|
|
|
+ __CheckBool(SUCCEEDED(hr));
|
|
|
+ // Create desktop duplication
|
|
|
+ hr = hDxgiOutput1->DuplicateOutput(_hDevice, &_hDeskDupl);
|
|
|
+ Free(hDxgiOutput1, [=] { hDxgiOutput1->Release(); });
|
|
|
+ __CheckBool(SUCCEEDED(hr));
|
|
|
+ // Set ColorSpace
|
|
|
+ D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace;
|
|
|
+ inputColorSpace.Usage = 1;
|
|
|
+ inputColorSpace.RGB_Range = 0;
|
|
|
+ inputColorSpace.YCbCr_Matrix = 1;
|
|
|
+ inputColorSpace.YCbCr_xvYCC = 0;
|
|
|
+ inputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255;
|
|
|
+ D3D11_VIDEO_PROCESSOR_COLOR_SPACE outputColorSpace;
|
|
|
+ outputColorSpace.Usage = 0;
|
|
|
+ outputColorSpace.RGB_Range = 0;
|
|
|
+ outputColorSpace.YCbCr_Matrix = 1;
|
|
|
+ outputColorSpace.YCbCr_xvYCC = 0;
|
|
|
+ outputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235;
|
|
|
+ _rgbToNv12.Open(_hDevice, _hContext, inputColorSpace, outputColorSpace);
|
|
|
+ _nv12Frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12, width, height);
|
|
|
+ _xrgbFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR0, width, height);
|
|
|
+ __CheckBool(_nv12Frame);
|
|
|
+ __CheckBool(_xrgbFrame);
|
|
|
+ _bInit = true;
|
|
|
+ return true;
|
|
|
+}
|
|
|
+
|
|
|
+void DxgiCapturerPrivate::Close() {
|
|
|
+ if (!_bInit) return;
|
|
|
+ _bInit = false;
|
|
|
+ _nv12Buffers.Clear();
|
|
|
+ _xrgbBuffers.Clear();
|
|
|
+ _rgbToNv12.Close();
|
|
|
+ Free(_nv12Frame, [this] { av_frame_free(&_nv12Frame); });
|
|
|
+ Free(_xrgbFrame, [this] { av_frame_free(&_xrgbFrame); });
|
|
|
+ Free(_hDeskDupl, [this] { _hDeskDupl->Release(); });
|
|
|
+ Free(_hDevice, [this] { _hDevice->Release(); });
|
|
|
+ Free(_hContext, [this] { _hContext->Release(); });
|
|
|
+}
|
|
|
+
|
|
|
+AVFrame* DxgiCapturerPrivate::GetFrame() {
|
|
|
+ if (!_bInit) return nullptr;
|
|
|
+ _isCaptureSuccess = false;
|
|
|
+ IDXGIResource* hDesktopResource = nullptr;
|
|
|
+ DXGI_OUTDUPL_FRAME_INFO FrameInfo;
|
|
|
+ HRESULT hr = _hDeskDupl->AcquireNextFrame(0, &FrameInfo, &hDesktopResource);
|
|
|
+ if (FAILED(hr)) {
|
|
|
+ if (hr == DXGI_ERROR_WAIT_TIMEOUT) return nullptr;
|
|
|
+ return nullptr;
|
|
|
+ }
|
|
|
+ // query next frame staging buffer
|
|
|
+ ID3D11Texture2D* srcImage = nullptr;
|
|
|
+ hr = hDesktopResource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&srcImage));
|
|
|
+ Free(hDesktopResource, [=] { hDesktopResource->Release(); });
|
|
|
+ __CheckNullptr(SUCCEEDED(hr));
|
|
|
+ srcImage->GetDesc(&_desc);
|
|
|
+ // create a new staging buffer for fill frame image
|
|
|
+ auto desc = _desc;
|
|
|
+ desc.ArraySize = 1;
|
|
|
+ desc.BindFlags = D3D11_BIND_FLAG::D3D11_BIND_RENDER_TARGET;
|
|
|
+ desc.MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE;
|
|
|
+ desc.SampleDesc.Count = 1;
|
|
|
+ desc.SampleDesc.Quality = 0;
|
|
|
+ desc.MipLevels = 1;
|
|
|
+ desc.CPUAccessFlags = 0;
|
|
|
+ desc.Usage = D3D11_USAGE_DEFAULT;
|
|
|
+ hr = _hDevice->CreateTexture2D(&desc, nullptr, &_gdiImage);
|
|
|
+ if (FAILED(hr)) {
|
|
|
+ __DebugPrint("Create _gdiImage failed");
|
|
|
+ Free(srcImage, [=] { srcImage->Release(); });
|
|
|
+ Free(_hDeskDupl, [this] { _hDeskDupl->ReleaseFrame(); });
|
|
|
+ return nullptr;
|
|
|
+ }
|
|
|
+ // copy next staging buffer to new staging buffer
|
|
|
+ _hContext->CopyResource(_gdiImage, srcImage);
|
|
|
+ Free(srcImage, [=] { srcImage->Release(); });
|
|
|
+ _hDeskDupl->ReleaseFrame();
|
|
|
+ // create staging buffer for map bits
|
|
|
+ _hStagingSurf = nullptr;
|
|
|
+ hr = _gdiImage->QueryInterface(__uuidof(IDXGISurface), (void**) (&_hStagingSurf));
|
|
|
+ if (FAILED(hr)) {
|
|
|
+ __DebugPrint("_gdiImage->QueryInterface failed");
|
|
|
+ Free(_gdiImage, [this] { _gdiImage->Release(); });
|
|
|
+ return nullptr;
|
|
|
+ }
|
|
|
+ _isCaptureSuccess = true;
|
|
|
+ HDC hdc = nullptr;
|
|
|
+ _hStagingSurf->GetDC(FALSE, &hdc);
|
|
|
+ // 释放 DC 并转换为 AVFrame
|
|
|
+ if (_isCaptureSuccess) {
|
|
|
+ _isCaptureSuccess = false;
|
|
|
+ _hStagingSurf->ReleaseDC(nullptr);
|
|
|
+ // 创建一个临时的纹理
|
|
|
+ ID3D11Texture2D* tmpImage = nullptr;
|
|
|
+ _desc.MiscFlags = 2050;
|
|
|
+ __CheckNullptr(SUCCEEDED(_hDevice->CreateTexture2D(&_desc, nullptr, &tmpImage)));
|
|
|
+ _hContext->CopyResource(tmpImage, _gdiImage);
|
|
|
+ // 首先尝试创建 NV12 纹理
|
|
|
+ AVFrame* frame = nullptr;
|
|
|
+ auto tmpFormat = _desc.Format;
|
|
|
+ _desc.Format = DXGI_FORMAT_NV12;
|
|
|
+ if (GenNv12Frame(_hDevice, _hContext, _desc, tmpImage, _nv12Buffers, _nv12Frame, _rgbToNv12)) {
|
|
|
+ frame = _nv12Frame;
|
|
|
+ } else {
|
|
|
+ _desc.Format = tmpFormat;
|
|
|
+ GenRgbFrame(_hDevice, _hContext, _desc, _gdiImage, _xrgbBuffers, _xrgbFrame);
|
|
|
+ frame = _xrgbFrame;
|
|
|
+ }
|
|
|
+ Free(_hStagingSurf, [this] { _hStagingSurf->Release(); });
|
|
|
+ Free(tmpImage, [&tmpImage] { tmpImage->Release(); });
|
|
|
+ Free(_gdiImage, [this] { _gdiImage->Release(); });
|
|
|
+ return frame;
|
|
|
+ }
|
|
|
+ return nullptr;
|
|
|
+}
|
|
|
+
|
|
|
+// --- DxgiCapturer 实现 ---
|
|
|
+DxgiCapturer::DxgiCapturer() : d(new DxgiCapturerPrivate) {}
|
|
|
+DxgiCapturer::~DxgiCapturer() { close(); delete d; }
|
|
|
|
|
|
// Opens capture of the monitor selected by target.monitorIdx. The actual
// capture rect comes from the monitor's desktop coordinates (looked up via
// MonitorFinder), superseding the width/height arguments.
// Returns false for non-monitor targets, an out-of-range index, or when
// the private implementation fails to initialize.
bool DxgiCapturer::open(const CaptureTarget& target, int width, int height) {
#ifdef PLATFORM_WINDOWS
    close();
    if (target.type != CaptureTargetType::Monitor) {
        return false;
    }
    const auto monitors = MonitorFinder::GetList();
    const int monitorCount = static_cast<int>(monitors.size());
    if (target.monitorIdx < 0 || target.monitorIdx >= monitorCount) {
        return false;
    }
    const auto& rect = monitors[target.monitorIdx].rect;
    m_left = rect.left;
    m_top = rect.top;
    m_width = rect.right - rect.left;
    m_height = rect.bottom - rect.top;
    return d->Open(m_left, m_top, m_width, m_height);
#else
    return false;
#endif
}
|
|
@@ -22,18 +215,17 @@ bool DxgiCapturer::open(const CaptureTarget& target, int width, int height) {
|
|
|
|
|
|
// Releases every capture resource held by the private implementation.
// No-op on non-Windows builds and when nothing was opened.
void DxgiCapturer::close() {
#ifdef PLATFORM_WINDOWS
    d->Close();
#endif
}
|
|
|
|
|
|
// Fetches the next captured frame, or nullptr when none is available.
// The AVFrame is owned by the capturer (freed on close()); callers must
// not free it. Always nullptr on non-Windows builds.
AVFrame* DxgiCapturer::getFrame() {
#ifdef PLATFORM_WINDOWS
    return d->GetFrame();
#else
    return nullptr;
#endif
}
|
|
|
|
|
|
} // namespace video
|
|
|
-} // namespace avrecorder
|
|
|
+} // namespace avrecorder
|