// DxgiCapturer.cpp — DXGI desktop-duplication screen capturer.
  1. #include "DxgiCapturer.h"
  2. #include "basic/basic.h"
  3. #include "basic/frame.h"
  4. #include "d3d/buffer_filler.h"
  5. #include "d3d/convert.h"
  6. #include "d3d/gen_frame.h"
  7. #include "../finder.h"
  8. #include <cassert>
  9. #include <d3d11.h>
  10. #include <dxgi1_2.h>
  11. namespace avrecorder {
  12. namespace video {
  13. // --- DxgiCapturerPrivate 实现 ---
  14. bool DxgiCapturerPrivate::Open(int left, int top, int width, int height) {
  15. Close();
  16. HRESULT hr = S_OK;
  17. _isAttached = false;
  18. if (_bInit) return false;
  19. // Driver types supported
  20. D3D_DRIVER_TYPE DriverTypes[] = {
  21. D3D_DRIVER_TYPE_HARDWARE,
  22. D3D_DRIVER_TYPE_WARP,
  23. D3D_DRIVER_TYPE_REFERENCE,
  24. };
  25. UINT NumDriverTypes = ARRAYSIZE(DriverTypes);
  26. // Feature levels supported
  27. D3D_FEATURE_LEVEL FeatureLevels[] = {
  28. D3D_FEATURE_LEVEL_11_0,
  29. D3D_FEATURE_LEVEL_10_1,
  30. D3D_FEATURE_LEVEL_10_0,
  31. D3D_FEATURE_LEVEL_9_1};
  32. UINT NumFeatureLevels = ARRAYSIZE(FeatureLevels);
  33. D3D_FEATURE_LEVEL FeatureLevel;
  34. // Create D3D device
  35. for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex) {
  36. hr = D3D11CreateDevice(nullptr,
  37. DriverTypes[DriverTypeIndex],
  38. nullptr,
  39. 0,
  40. FeatureLevels,
  41. NumFeatureLevels,
  42. D3D11_SDK_VERSION,
  43. &_hDevice,
  44. &FeatureLevel,
  45. &_hContext);
  46. if (SUCCEEDED(hr)) {
  47. break;
  48. }
  49. }
  50. __CheckBool(SUCCEEDED(hr));
  51. // Get DXGI device
  52. IDXGIDevice* hDxgiDevice = nullptr;
  53. __CheckBool(SUCCEEDED(_hDevice->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&hDxgiDevice))));
  54. // Get DXGI adapter
  55. IDXGIAdapter* hDxgiAdapter = nullptr;
  56. hr = hDxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&hDxgiAdapter));
  57. Free(hDxgiDevice, [=] { hDxgiDevice->Release(); });
  58. __CheckBool(SUCCEEDED(hr));
  59. // Get output
  60. IDXGIOutput* hDxgiOutput = nullptr;
  61. DXGI_OUTPUT_DESC dxgiOutDesc;
  62. ZeroMemory(&dxgiOutDesc, sizeof(dxgiOutDesc));
  63. for (int idx = 0; SUCCEEDED(hr = hDxgiAdapter->EnumOutputs(idx, &hDxgiOutput)); ++idx) {
  64. hDxgiOutput->GetDesc(&dxgiOutDesc);
  65. if (dxgiOutDesc.DesktopCoordinates.left == left && dxgiOutDesc.DesktopCoordinates.top == top) {
  66. break;
  67. }
  68. }
  69. Free(hDxgiAdapter, [=] { hDxgiAdapter->Release(); });
  70. __CheckBool(SUCCEEDED(hr));
  71. // QI for Output 1
  72. IDXGIOutput1* hDxgiOutput1 = nullptr;
  73. hr = hDxgiOutput->QueryInterface(__uuidof(hDxgiOutput1), reinterpret_cast<void**>(&hDxgiOutput1));
  74. Free(hDxgiOutput, [=] { hDxgiOutput->Release(); });
  75. __CheckBool(SUCCEEDED(hr));
  76. // Create desktop duplication
  77. hr = hDxgiOutput1->DuplicateOutput(_hDevice, &_hDeskDupl);
  78. Free(hDxgiOutput1, [=] { hDxgiOutput1->Release(); });
  79. __CheckBool(SUCCEEDED(hr));
  80. // Set ColorSpace
  81. D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace;
  82. inputColorSpace.Usage = 1;
  83. inputColorSpace.RGB_Range = 0;
  84. inputColorSpace.YCbCr_Matrix = 1;
  85. inputColorSpace.YCbCr_xvYCC = 0;
  86. inputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255;
  87. D3D11_VIDEO_PROCESSOR_COLOR_SPACE outputColorSpace;
  88. outputColorSpace.Usage = 0;
  89. outputColorSpace.RGB_Range = 0;
  90. outputColorSpace.YCbCr_Matrix = 1;
  91. outputColorSpace.YCbCr_xvYCC = 0;
  92. outputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235;
  93. _rgbToNv12.Open(_hDevice, _hContext, inputColorSpace, outputColorSpace);
  94. _nv12Frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12, width, height);
  95. _xrgbFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR0, width, height);
  96. __CheckBool(_nv12Frame);
  97. __CheckBool(_xrgbFrame);
  98. _bInit = true;
  99. return true;
  100. }
// Releases everything acquired by Open(). Safe to call repeatedly: the
// _bInit guard makes subsequent calls no-ops.
// NOTE(review): teardown order looks deliberate — buffers/converter/frames
// first, then the duplication interface, then device and context; keep it.
void DxgiCapturerPrivate::Close() {
    if (!_bInit) return;
    _bInit = false;
    _nv12Buffers.Clear();
    _xrgbBuffers.Clear();
    _rgbToNv12.Close();
    // av_frame_free() also nulls the pointer passed by address.
    Free(_nv12Frame, [this] { av_frame_free(&_nv12Frame); });
    Free(_xrgbFrame, [this] { av_frame_free(&_xrgbFrame); });
    Free(_hDeskDupl, [this] { _hDeskDupl->Release(); });
    Free(_hDevice, [this] { _hDevice->Release(); });
    Free(_hContext, [this] { _hContext->Release(); });
}
  113. AVFrame* DxgiCapturerPrivate::GetFrame() {
  114. if (!_bInit) return nullptr;
  115. _isCaptureSuccess = false;
  116. IDXGIResource* hDesktopResource = nullptr;
  117. DXGI_OUTDUPL_FRAME_INFO FrameInfo;
  118. HRESULT hr = _hDeskDupl->AcquireNextFrame(0, &FrameInfo, &hDesktopResource);
  119. if (FAILED(hr)) {
  120. if (hr == DXGI_ERROR_WAIT_TIMEOUT) return nullptr;
  121. return nullptr;
  122. }
  123. // query next frame staging buffer
  124. ID3D11Texture2D* srcImage = nullptr;
  125. hr = hDesktopResource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&srcImage));
  126. Free(hDesktopResource, [=] { hDesktopResource->Release(); });
  127. __CheckNullptr(SUCCEEDED(hr));
  128. srcImage->GetDesc(&_desc);
  129. // create a new staging buffer for fill frame image
  130. auto desc = _desc;
  131. desc.ArraySize = 1;
  132. desc.BindFlags = D3D11_BIND_FLAG::D3D11_BIND_RENDER_TARGET;
  133. desc.MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE;
  134. desc.SampleDesc.Count = 1;
  135. desc.SampleDesc.Quality = 0;
  136. desc.MipLevels = 1;
  137. desc.CPUAccessFlags = 0;
  138. desc.Usage = D3D11_USAGE_DEFAULT;
  139. hr = _hDevice->CreateTexture2D(&desc, nullptr, &_gdiImage);
  140. if (FAILED(hr)) {
  141. __DebugPrint("Create _gdiImage failed");
  142. Free(srcImage, [=] { srcImage->Release(); });
  143. Free(_hDeskDupl, [this] { _hDeskDupl->ReleaseFrame(); });
  144. return nullptr;
  145. }
  146. // copy next staging buffer to new staging buffer
  147. _hContext->CopyResource(_gdiImage, srcImage);
  148. Free(srcImage, [=] { srcImage->Release(); });
  149. _hDeskDupl->ReleaseFrame();
  150. // create staging buffer for map bits
  151. _hStagingSurf = nullptr;
  152. hr = _gdiImage->QueryInterface(__uuidof(IDXGISurface), (void**) (&_hStagingSurf));
  153. if (FAILED(hr)) {
  154. __DebugPrint("_gdiImage->QueryInterface failed");
  155. Free(_gdiImage, [this] { _gdiImage->Release(); });
  156. return nullptr;
  157. }
  158. _isCaptureSuccess = true;
  159. HDC hdc = nullptr;
  160. _hStagingSurf->GetDC(FALSE, &hdc);
  161. // 释放 DC 并转换为 AVFrame
  162. if (_isCaptureSuccess) {
  163. _isCaptureSuccess = false;
  164. _hStagingSurf->ReleaseDC(nullptr);
  165. // 创建一个临时的纹理
  166. ID3D11Texture2D* tmpImage = nullptr;
  167. _desc.MiscFlags = 2050;
  168. __CheckNullptr(SUCCEEDED(_hDevice->CreateTexture2D(&_desc, nullptr, &tmpImage)));
  169. _hContext->CopyResource(tmpImage, _gdiImage);
  170. // 首先尝试创建 NV12 纹理
  171. AVFrame* frame = nullptr;
  172. auto tmpFormat = _desc.Format;
  173. _desc.Format = DXGI_FORMAT_NV12;
  174. if (GenNv12Frame(_hDevice, _hContext, _desc, tmpImage, _nv12Buffers, _nv12Frame, _rgbToNv12)) {
  175. frame = _nv12Frame;
  176. } else {
  177. _desc.Format = tmpFormat;
  178. GenRgbFrame(_hDevice, _hContext, _desc, _gdiImage, _xrgbBuffers, _xrgbFrame);
  179. frame = _xrgbFrame;
  180. }
  181. Free(_hStagingSurf, [this] { _hStagingSurf->Release(); });
  182. Free(tmpImage, [&tmpImage] { tmpImage->Release(); });
  183. Free(_gdiImage, [this] { _gdiImage->Release(); });
  184. return frame;
  185. }
  186. return nullptr;
  187. }
  188. // --- DxgiCapturer 实现 ---
// Construct with a fresh private implementation (pimpl).
DxgiCapturer::DxgiCapturer() : d(new DxgiCapturerPrivate) {}
// Release capture resources before destroying the pimpl.
DxgiCapturer::~DxgiCapturer() { close(); delete d; }
  191. bool DxgiCapturer::open(const CaptureTarget& target, int width, int height) {
  192. #ifdef PLATFORM_WINDOWS
  193. close();
  194. if (target.type != CaptureTargetType::Monitor) return false;
  195. auto monitors = MonitorFinder::GetList();
  196. if (target.monitorIdx < 0 || target.monitorIdx >= (int)monitors.size()) return false;
  197. auto& monitorInfo = monitors[target.monitorIdx];
  198. m_left = monitorInfo.rect.left;
  199. m_top = monitorInfo.rect.top;
  200. m_width = monitorInfo.rect.right - monitorInfo.rect.left;
  201. m_height = monitorInfo.rect.bottom - monitorInfo.rect.top;
  202. return d->Open(m_left, m_top, m_width, m_height);
  203. #else
  204. return false;
  205. #endif
  206. }
// Stops capture and releases all D3D/DXGI resources (no-op off Windows).
void DxgiCapturer::close() {
#ifdef PLATFORM_WINDOWS
    d->Close();
#endif
}
// Returns the next captured frame, or nullptr when none is available
// (timeout, failure, or non-Windows build). The returned AVFrame is owned
// by the capturer (freed in Close()); callers must not free it.
AVFrame* DxgiCapturer::getFrame() {
#ifdef PLATFORM_WINDOWS
    return d->GetFrame();
#else
    return nullptr;
#endif
}
  219. } // namespace video
  220. } // namespace avrecorder